From 8000cf131592b6edcded65ef4be20b8ac0f1bfd3 Mon Sep 17 00:00:00 2001 From: Dirk Klimpel <5740567+dklimpel@users.noreply.github.com> Date: Tue, 16 Mar 2021 16:44:25 +0100 Subject: [PATCH 01/52] Return m.change_password.enabled=false if local database is disabled (#9588) Instead of basing this on whether the user has a password hash. This allows an SSO user to add a password to their account, but only if the local password database is configured. --- changelog.d/9588.bugfix | 1 + synapse/handlers/auth.py | 13 +++++++ synapse/rest/client/v2_alpha/capabilities.py | 23 ++++++------ .../rest/client/v2_alpha/test_capabilities.py | 36 ++++++++++++++++--- 4 files changed, 58 insertions(+), 15 deletions(-) create mode 100644 changelog.d/9588.bugfix diff --git a/changelog.d/9588.bugfix b/changelog.d/9588.bugfix new file mode 100644 index 000000000..b8d614056 --- /dev/null +++ b/changelog.d/9588.bugfix @@ -0,0 +1 @@ +Fix the `/capabilities` endpoint to return `m.change_password` as disabled if the local password database is not used for authentication. Contributed by @dklimpel. diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index fb5f8118f..badac8c26 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -886,6 +886,19 @@ class AuthHandler(BaseHandler): ) return result + def can_change_password(self) -> bool: + """Get whether users on this server are allowed to change or set a password. + + Both `config.password_enabled` and `config.password_localdb_enabled` must be true. + + Note that all accounts (even SSO accounts) are allowed to add passwords if the above + is true. + + Returns: + Whether users on this server are allowed to change or set a password + """ + return self._password_enabled and self._password_localdb_enabled + def get_supported_login_types(self) -> Iterable[str]: """Get the login types supported for the /login API diff --git a/synapse/rest/client/v2_alpha/capabilities.py b/synapse/rest/client/v2_alpha/capabilities.py index 76879ac55..44ccf10ed 100644 --- a/synapse/rest/client/v2_alpha/capabilities.py +++ b/synapse/rest/client/v2_alpha/capabilities.py @@ -13,12 +13,18 @@ # See the License for the specific language governing permissions and # limitations under the License.
import logging +from typing import TYPE_CHECKING, Tuple from synapse.api.room_versions import KNOWN_ROOM_VERSIONS from synapse.http.servlet import RestServlet +from synapse.http.site import SynapseRequest +from synapse.types import JsonDict from ._base import client_patterns +if TYPE_CHECKING: + from synapse.server import HomeServer + logger = logging.getLogger(__name__) @@ -27,21 +33,16 @@ class CapabilitiesRestServlet(RestServlet): PATTERNS = client_patterns("/capabilities$") - def __init__(self, hs): - """ - Args: - hs (synapse.server.HomeServer): server - """ + def __init__(self, hs: "HomeServer"): super().__init__() self.hs = hs self.config = hs.config self.auth = hs.get_auth() - self.store = hs.get_datastore() + self.auth_handler = hs.get_auth_handler() - async def on_GET(self, request): - requester = await self.auth.get_user_by_req(request, allow_guest=True) - user = await self.store.get_user_by_id(requester.user.to_string()) - change_password = bool(user["password_hash"]) + async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + await self.auth.get_user_by_req(request, allow_guest=True) + change_password = self.auth_handler.can_change_password() response = { "capabilities": { @@ -58,5 +59,5 @@ class CapabilitiesRestServlet(RestServlet): return 200, response -def register_servlets(hs, http_server): +def register_servlets(hs: "HomeServer", http_server): CapabilitiesRestServlet(hs).register(http_server) diff --git a/tests/rest/client/v2_alpha/test_capabilities.py b/tests/rest/client/v2_alpha/test_capabilities.py index e808339fb..287a1a485 100644 --- a/tests/rest/client/v2_alpha/test_capabilities.py +++ b/tests/rest/client/v2_alpha/test_capabilities.py @@ -18,6 +18,7 @@ from synapse.rest.client.v1 import login from synapse.rest.client.v2_alpha import capabilities from tests import unittest +from tests.unittest import override_config class CapabilitiesTestCase(unittest.HomeserverTestCase): @@ -33,6 +34,7 @@ class CapabilitiesTestCase(unittest.HomeserverTestCase): hs = self.setup_test_homeserver() self.store = hs.get_datastore() self.config = hs.config + self.auth_handler = hs.get_auth_handler() return hs def test_check_auth_required(self): @@ -56,7 +58,7 @@ class CapabilitiesTestCase(unittest.HomeserverTestCase): capabilities["m.room_versions"]["default"], ) - def test_get_change_password_capabilities(self): + def test_get_change_password_capabilities_password_login(self): localpart = "user" password = "pass" user = self.register_user(localpart, password) @@ -66,10 +68,36 @@ class CapabilitiesTestCase(unittest.HomeserverTestCase): capabilities = channel.json_body["capabilities"] self.assertEqual(channel.code, 200) - - # Test case where password is handled outside of Synapse self.assertTrue(capabilities["m.change_password"]["enabled"]) - self.get_success(self.store.user_set_password_hash(user, None)) + + @override_config({"password_config": {"localdb_enabled": False}}) + def test_get_change_password_capabilities_localdb_disabled(self): + localpart = "user" + password = "pass" + user = self.register_user(localpart, password) + access_token = self.get_success( + self.auth_handler.get_access_token_for_user_id( + user, device_id=None, valid_until_ms=None + ) + ) + + channel = self.make_request("GET", self.url, access_token=access_token) + capabilities = channel.json_body["capabilities"] + + self.assertEqual(channel.code, 200) + self.assertFalse(capabilities["m.change_password"]["enabled"]) + + @override_config({"password_config": {"enabled": False}}) + def 
test_get_change_password_capabilities_password_disabled(self): + localpart = "user" + password = "pass" + user = self.register_user(localpart, password) + access_token = self.get_success( + self.auth_handler.get_access_token_for_user_id( + user, device_id=None, valid_until_ms=None + ) + ) + channel = self.make_request("GET", self.url, access_token=access_token) capabilities = channel.json_body["capabilities"] From dd5e5dc1d6c88a3532d25f18cfc312d8bc813473 Mon Sep 17 00:00:00 2001 From: Hubbe Date: Tue, 16 Mar 2021 17:46:07 +0200 Subject: [PATCH 02/52] Add SSO attribute requirements for OIDC providers (#9609) Allows limiting who can log in using OIDC via the claims made from the IdP. --- changelog.d/9609.feature | 1 + docs/sample_config.yaml | 24 ++++++ synapse/config/oidc_config.py | 40 +++++++++- synapse/handlers/oidc_handler.py | 13 +++ tests/handlers/test_oidc.py | 132 +++++++++++++++++++++++++++++++ 5 files changed, 209 insertions(+), 1 deletion(-) create mode 100644 changelog.d/9609.feature diff --git a/changelog.d/9609.feature b/changelog.d/9609.feature new file mode 100644 index 000000000..f3b634206 --- /dev/null +++ b/changelog.d/9609.feature @@ -0,0 +1 @@ +Logins using OpenID Connect can require attributes on the `userinfo` response in order to log in. Contributed by Hubbe King. diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml index 7de000f4a..a9f59e39f 100644 --- a/docs/sample_config.yaml +++ b/docs/sample_config.yaml @@ -1873,6 +1873,24 @@ saml2_config: # which is set to the claims returned by the UserInfo Endpoint and/or # in the ID Token. # +# It is possible to configure Synapse to only allow logins if certain attributes +# match particular values in the OIDC userinfo. The requirements can be listed under +# `attribute_requirements` as shown below. All of the listed attributes must +# match for the login to be permitted. Additional attributes can be added to +# userinfo by expanding the `scopes` section of the OIDC config to retrieve +# additional information from the OIDC provider. +# +# If the OIDC claim is a list, then the attribute must match any value in the list. +# Otherwise, it must exactly match the value of the claim. Using the example +# below, the `family_name` claim MUST be "Stephensson", but the `groups` +# claim MUST contain "admin". +# +# attribute_requirements: +# - attribute: family_name +# value: "Stephensson" +# - attribute: groups +# value: "admin" +# # See https://github.com/matrix-org/synapse/blob/master/docs/openid.md # for information on how to configure these options. # @@ -1905,6 +1923,9 @@ oidc_providers: # localpart_template: "{{ user.login }}" # display_name_template: "{{ user.name }}" # email_template: "{{ user.email }}" + # attribute_requirements: + # - attribute: userGroup + # value: "synapseUsers" # For use with Keycloak # @@ -1914,6 +1935,9 @@ oidc_providers: # client_id: "synapse" # client_secret: "copy secret generated in Keycloak UI" # scopes: ["openid", "profile"] + # attribute_requirements: + # - attribute: groups + # value: "admin" # For use with Github # diff --git a/synapse/config/oidc_config.py b/synapse/config/oidc_config.py index 2bfb537c1..eab042a08 100644 --- a/synapse/config/oidc_config.py +++ b/synapse/config/oidc_config.py @@ -15,11 +15,12 @@ # limitations under the License.
from collections import Counter -from typing import Iterable, Mapping, Optional, Tuple, Type +from typing import Iterable, List, Mapping, Optional, Tuple, Type import attr from synapse.config._util import validate_config +from synapse.config.sso import SsoAttributeRequirement from synapse.python_dependencies import DependencyException, check_requirements from synapse.types import Collection, JsonDict from synapse.util.module_loader import load_module @@ -191,6 +192,24 @@ class OIDCConfig(Config): # which is set to the claims returned by the UserInfo Endpoint and/or # in the ID Token. # + # It is possible to configure Synapse to only allow logins if certain attributes + # match particular values in the OIDC userinfo. The requirements can be listed under + # `attribute_requirements` as shown below. All of the listed attributes must + # match for the login to be permitted. Additional attributes can be added to + # userinfo by expanding the `scopes` section of the OIDC config to retrieve + # additional information from the OIDC provider. + # + # If the OIDC claim is a list, then the attribute must match any value in the list. + # Otherwise, it must exactly match the value of the claim. Using the example + # below, the `family_name` claim MUST be "Stephensson", but the `groups` + # claim MUST contain "admin". + # + # attribute_requirements: + # - attribute: family_name + # value: "Stephensson" + # - attribute: groups + # value: "admin" + # # See https://github.com/matrix-org/synapse/blob/master/docs/openid.md # for information on how to configure these options. # @@ -223,6 +242,9 @@ class OIDCConfig(Config): # localpart_template: "{{{{ user.login }}}}" # display_name_template: "{{{{ user.name }}}}" # email_template: "{{{{ user.email }}}}" + # attribute_requirements: + # - attribute: userGroup + # value: "synapseUsers" # For use with Keycloak # @@ -232,6 +254,9 @@ class OIDCConfig(Config): # client_id: "synapse" # client_secret: "copy secret generated in Keycloak UI" # scopes: ["openid", "profile"] + # attribute_requirements: + # - attribute: groups + # value: "admin" # For use with Github # @@ -329,6 +354,10 @@ OIDC_PROVIDER_CONFIG_SCHEMA = { }, "allow_existing_users": {"type": "boolean"}, "user_mapping_provider": {"type": ["object", "null"]}, + "attribute_requirements": { + "type": "array", + "items": SsoAttributeRequirement.JSON_SCHEMA, + }, }, } @@ -465,6 +494,11 @@ def _parse_oidc_config_dict( jwt_header=client_secret_jwt_key_config["jwt_header"], jwt_payload=client_secret_jwt_key_config.get("jwt_payload", {}), ) + # parse attribute_requirements from config (list of dicts) into a list of SsoAttributeRequirement + attribute_requirements = [ + SsoAttributeRequirement(**x) + for x in oidc_config.get("attribute_requirements", []) + ] return OidcProviderConfig( idp_id=idp_id, @@ -488,6 +522,7 @@ def _parse_oidc_config_dict( allow_existing_users=oidc_config.get("allow_existing_users", False), user_mapping_provider_class=user_mapping_provider_class, user_mapping_provider_config=user_mapping_provider_config, + attribute_requirements=attribute_requirements, ) @@ -577,3 +612,6 @@ class OidcProviderConfig: # the config of the user mapping provider user_mapping_provider_config = attr.ib() + + # required attributes to require in userinfo to allow login/registration + attribute_requirements = attr.ib(type=List[SsoAttributeRequirement]) diff --git a/synapse/handlers/oidc_handler.py b/synapse/handlers/oidc_handler.py index 6d8551a6d..bc3630e9e 100644 --- a/synapse/handlers/oidc_handler.py +++ 
b/synapse/handlers/oidc_handler.py @@ -280,6 +280,7 @@ class OidcProvider: self._config = provider self._callback_url = hs.config.oidc_callback_url # type: str + self._oidc_attribute_requirements = provider.attribute_requirements self._scopes = provider.scopes self._user_profile_method = provider.user_profile_method @@ -859,6 +860,18 @@ class OidcProvider: ) # otherwise, it's a login + logger.debug("Userinfo for OIDC login: %s", userinfo) + + # Ensure that the attributes of the logged in user meet the required + # attributes by checking the userinfo against attribute_requirements + # In order to deal with the fact that OIDC userinfo can contain many + # types of data, we wrap non-list values in lists. + if not self._sso_handler.check_required_attributes( + request, + {k: v if isinstance(v, list) else [v] for k, v in userinfo.items()}, + self._oidc_attribute_requirements, + ): + return # Call the mapper to register/login the user try: diff --git a/tests/handlers/test_oidc.py b/tests/handlers/test_oidc.py index 5e9c9c2e8..c7796fb83 100644 --- a/tests/handlers/test_oidc.py +++ b/tests/handlers/test_oidc.py @@ -989,6 +989,138 @@ class OidcHandlerTestCase(HomeserverTestCase): self.get_success(_make_callback_with_userinfo(self.hs, userinfo)) self.assertRenderedError("mapping_error", "localpart is invalid: ") + @override_config( + { + "oidc_config": { + **DEFAULT_CONFIG, + "attribute_requirements": [{"attribute": "test", "value": "foobar"}], + } + } + ) + def test_attribute_requirements(self): + """The required attributes must be met from the OIDC userinfo response.""" + auth_handler = self.hs.get_auth_handler() + auth_handler.complete_sso_login = simple_async_mock() + + # userinfo lacking "test": "foobar" attribute should fail. + userinfo = { + "sub": "tester", + "username": "tester", + } + self.get_success(_make_callback_with_userinfo(self.hs, userinfo)) + auth_handler.complete_sso_login.assert_not_called() + + # userinfo with "test": "foobar" attribute should succeed. + userinfo = { + "sub": "tester", + "username": "tester", + "test": "foobar", + } + self.get_success(_make_callback_with_userinfo(self.hs, userinfo)) + + # check that the auth handler got called as expected + auth_handler.complete_sso_login.assert_called_once_with( + "@tester:test", "oidc", ANY, ANY, None, new_user=True + ) + + @override_config( + { + "oidc_config": { + **DEFAULT_CONFIG, + "attribute_requirements": [{"attribute": "test", "value": "foobar"}], + } + } + ) + def test_attribute_requirements_contains(self): + """Test that auth succeeds if userinfo attribute CONTAINS required value""" + auth_handler = self.hs.get_auth_handler() + auth_handler.complete_sso_login = simple_async_mock() + # userinfo with "test": ["foobar", "foo", "bar"] attribute should succeed. + userinfo = { + "sub": "tester", + "username": "tester", + "test": ["foobar", "foo", "bar"], + } + self.get_success(_make_callback_with_userinfo(self.hs, userinfo)) + + # check that the auth handler got called as expected + auth_handler.complete_sso_login.assert_called_once_with( + "@tester:test", "oidc", ANY, ANY, None, new_user=True + ) + + @override_config( + { + "oidc_config": { + **DEFAULT_CONFIG, + "attribute_requirements": [{"attribute": "test", "value": "foobar"}], + } + } + ) + def test_attribute_requirements_mismatch(self): + """ + Test that auth fails if attributes exist but don't match, + or are non-string values. 
+ """ + auth_handler = self.hs.get_auth_handler() + auth_handler.complete_sso_login = simple_async_mock() + # userinfo with "test": "not_foobar" attribute should fail + userinfo = { + "sub": "tester", + "username": "tester", + "test": "not_foobar", + } + self.get_success(_make_callback_with_userinfo(self.hs, userinfo)) + auth_handler.complete_sso_login.assert_not_called() + + # userinfo with "test": ["foo", "bar"] attribute should fail + userinfo = { + "sub": "tester", + "username": "tester", + "test": ["foo", "bar"], + } + self.get_success(_make_callback_with_userinfo(self.hs, userinfo)) + auth_handler.complete_sso_login.assert_not_called() + + # userinfo with "test": False attribute should fail + # this is largely just to ensure we don't crash here + userinfo = { + "sub": "tester", + "username": "tester", + "test": False, + } + self.get_success(_make_callback_with_userinfo(self.hs, userinfo)) + auth_handler.complete_sso_login.assert_not_called() + + # userinfo with "test": None attribute should fail + # a value of None breaks the OIDC spec, but it's important to not crash here + userinfo = { + "sub": "tester", + "username": "tester", + "test": None, + } + self.get_success(_make_callback_with_userinfo(self.hs, userinfo)) + auth_handler.complete_sso_login.assert_not_called() + + # userinfo with "test": 1 attribute should fail + # this is largely just to ensure we don't crash here + userinfo = { + "sub": "tester", + "username": "tester", + "test": 1, + } + self.get_success(_make_callback_with_userinfo(self.hs, userinfo)) + auth_handler.complete_sso_login.assert_not_called() + + # userinfo with "test": 3.14 attribute should fail + # this is largely just to ensure we don't crash here + userinfo = { + "sub": "tester", + "username": "tester", + "test": 3.14, + } + self.get_success(_make_callback_with_userinfo(self.hs, userinfo)) + auth_handler.complete_sso_login.assert_not_called() + def _generate_oidc_session_token( self, state: str, From 27d2820c33d94cd99aea128b6ade76a7de838c3d Mon Sep 17 00:00:00 2001 From: Jonathan de Jong Date: Tue, 16 Mar 2021 19:19:27 +0100 Subject: [PATCH 03/52] Enable flake8-bugbear, but disable most checks. (#9499) * Adds B00 to ignored checks. * Fixes remaining issues. --- changelog.d/9499.misc | 1 + setup.cfg | 3 ++- setup.py | 1 + synapse/app/__init__.py | 4 +++- synapse/config/key.py | 6 +++++- synapse/config/metrics.py | 4 +++- synapse/config/oidc_config.py | 4 +++- synapse/config/repository.py | 4 +++- synapse/config/saml2_config.py | 4 +++- synapse/config/tracer.py | 4 +++- synapse/crypto/context_factory.py | 2 +- tests/unittest.py | 2 +- 12 files changed, 29 insertions(+), 10 deletions(-) create mode 100644 changelog.d/9499.misc diff --git a/changelog.d/9499.misc b/changelog.d/9499.misc new file mode 100644 index 000000000..1513017a1 --- /dev/null +++ b/changelog.d/9499.misc @@ -0,0 +1 @@ +Introduce bugbear to the test suite and fix some of it's lint violations. \ No newline at end of file diff --git a/setup.cfg b/setup.cfg index 5e301c2cd..920868df2 100644 --- a/setup.cfg +++ b/setup.cfg @@ -18,7 +18,8 @@ ignore = # E203: whitespace before ':' (which is contrary to pep8?) 
# E731: do not assign a lambda expression, use a def # E501: Line too long (black enforces this for us) -ignore=W503,W504,E203,E731,E501 +# B00: Subsection of the bugbear suite (TODO: add in remaining fixes) +ignore=W503,W504,E203,E731,E501,B00 [isort] line_length = 88 diff --git a/setup.py b/setup.py index bbd9e7862..b834e4e55 100755 --- a/setup.py +++ b/setup.py @@ -99,6 +99,7 @@ CONDITIONAL_REQUIREMENTS["lint"] = [ "isort==5.7.0", "black==20.8b1", "flake8-comprehensions", + "flake8-bugbear", "flake8", ] diff --git a/synapse/app/__init__.py b/synapse/app/__init__.py index 4a9b0129c..d1a2cd5e1 100644 --- a/synapse/app/__init__.py +++ b/synapse/app/__init__.py @@ -22,7 +22,9 @@ logger = logging.getLogger(__name__) try: python_dependencies.check_requirements() except python_dependencies.DependencyException as e: - sys.stderr.writelines(e.message) + sys.stderr.writelines( + e.message # noqa: B306, DependencyException.message is a property + ) sys.exit(1) diff --git a/synapse/config/key.py b/synapse/config/key.py index de964dff1..350ff1d66 100644 --- a/synapse/config/key.py +++ b/synapse/config/key.py @@ -404,7 +404,11 @@ def _parse_key_servers(key_servers, federation_verify_certificates): try: jsonschema.validate(key_servers, TRUSTED_KEY_SERVERS_SCHEMA) except jsonschema.ValidationError as e: - raise ConfigError("Unable to parse 'trusted_key_servers': " + e.message) + raise ConfigError( + "Unable to parse 'trusted_key_servers': {}".format( + e.message # noqa: B306, jsonschema.ValidationError.message is a valid attribute + ) + ) for server in key_servers: server_name = server["server_name"] diff --git a/synapse/config/metrics.py b/synapse/config/metrics.py index dfd27e152..2b289f420 100644 --- a/synapse/config/metrics.py +++ b/synapse/config/metrics.py @@ -56,7 +56,9 @@ class MetricsConfig(Config): try: check_requirements("sentry") except DependencyException as e: - raise ConfigError(e.message) + raise ConfigError( + e.message # noqa: B306, DependencyException.message is a property + ) self.sentry_dsn = config["sentry"].get("dsn") if not self.sentry_dsn: diff --git a/synapse/config/oidc_config.py b/synapse/config/oidc_config.py index eab042a08..747ab9a7f 100644 --- a/synapse/config/oidc_config.py +++ b/synapse/config/oidc_config.py @@ -42,7 +42,9 @@ class OIDCConfig(Config): try: check_requirements("oidc") except DependencyException as e: - raise ConfigError(e.message) from e + raise ConfigError( + e.message # noqa: B306, DependencyException.message is a property + ) from e # check we don't have any duplicate idp_ids now. 
(The SSO handler will also # check for duplicates when the REST listeners get registered, but that happens diff --git a/synapse/config/repository.py b/synapse/config/repository.py index 69d9de5a4..061c4ec83 100644 --- a/synapse/config/repository.py +++ b/synapse/config/repository.py @@ -176,7 +176,9 @@ class ContentRepositoryConfig(Config): check_requirements("url_preview") except DependencyException as e: - raise ConfigError(e.message) + raise ConfigError( + e.message # noqa: B306, DependencyException.message is a property + ) if "url_preview_ip_range_blacklist" not in config: raise ConfigError( diff --git a/synapse/config/saml2_config.py b/synapse/config/saml2_config.py index 4b494f217..6db9cb5ce 100644 --- a/synapse/config/saml2_config.py +++ b/synapse/config/saml2_config.py @@ -76,7 +76,9 @@ class SAML2Config(Config): try: check_requirements("saml2") except DependencyException as e: - raise ConfigError(e.message) + raise ConfigError( + e.message # noqa: B306, DependencyException.message is a property + ) self.saml2_enabled = True diff --git a/synapse/config/tracer.py b/synapse/config/tracer.py index 0c1a854f0..727a1e700 100644 --- a/synapse/config/tracer.py +++ b/synapse/config/tracer.py @@ -39,7 +39,9 @@ class TracerConfig(Config): try: check_requirements("opentracing") except DependencyException as e: - raise ConfigError(e.message) + raise ConfigError( + e.message # noqa: B306, DependencyException.message is a property + ) # The tracer is enabled so sanitize the config diff --git a/synapse/crypto/context_factory.py b/synapse/crypto/context_factory.py index 14b21796d..4ca13011e 100644 --- a/synapse/crypto/context_factory.py +++ b/synapse/crypto/context_factory.py @@ -219,7 +219,7 @@ class SSLClientConnectionCreator: # ... and we also gut-wrench a '_synapse_tls_verifier' attribute into the # tls_protocol so that the SSL context's info callback has something to # call to do the cert verification. - setattr(tls_protocol, "_synapse_tls_verifier", self._verifier) + tls_protocol._synapse_tls_verifier = self._verifier return connection diff --git a/tests/unittest.py b/tests/unittest.py index ca7031c72..224f037ce 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -140,7 +140,7 @@ class TestCase(unittest.TestCase): try: self.assertEquals(attrs[key], getattr(obj, key)) except AssertionError as e: - raise (type(e))(e.message + " for '.%s'" % key) + raise (type(e))("Assert error for '.{}':".format(key)) from e def assert_dict(self, required, actual): """Does a partial assert of a dict. From b449af0379db871945f32a572883d47b5c9018a3 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Wed, 17 Mar 2021 07:14:39 -0400 Subject: [PATCH 04/52] Add type hints to the room member handler. (#9631) --- changelog.d/9631.misc | 1 + synapse/handlers/register.py | 4 ++-- synapse/handlers/room_member.py | 4 ++++ synapse/handlers/room_member_worker.py | 10 ++++++++-- synapse/server.py | 4 ++-- 5 files changed, 17 insertions(+), 6 deletions(-) create mode 100644 changelog.d/9631.misc diff --git a/changelog.d/9631.misc b/changelog.d/9631.misc new file mode 100644 index 000000000..35338cd33 --- /dev/null +++ b/changelog.d/9631.misc @@ -0,0 +1 @@ +Add additional type hints to the Homeserver object. 
diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index 1abc8875c..d7f226d58 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -437,10 +437,10 @@ class RegistrationHandler(BaseHandler): if RoomAlias.is_valid(r): ( - room_id, + room, remote_room_hosts, ) = await room_member_handler.lookup_room_alias(room_alias) - room_id = room_id.to_string() + room_id = room.to_string() else: raise SynapseError( 400, "%s was not legal room ID or room alias" % (r,) diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index 166092130..4d20ed835 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -155,6 +155,10 @@ class RoomMemberHandler(metaclass=abc.ABCMeta): """ raise NotImplementedError() + @abc.abstractmethod + async def forget(self, user: UserID, room_id: str) -> None: + raise NotImplementedError() + def ratelimit_invite(self, room_id: Optional[str], invitee_user_id: str): """Ratelimit invites by room and by target user. diff --git a/synapse/handlers/room_member_worker.py b/synapse/handlers/room_member_worker.py index 108730a7a..d75506c75 100644 --- a/synapse/handlers/room_member_worker.py +++ b/synapse/handlers/room_member_worker.py @@ -14,7 +14,7 @@ # limitations under the License. import logging -from typing import List, Optional, Tuple +from typing import TYPE_CHECKING, List, Optional, Tuple from synapse.api.errors import SynapseError from synapse.handlers.room_member import RoomMemberHandler @@ -25,11 +25,14 @@ from synapse.replication.http.membership import ( ) from synapse.types import Requester, UserID +if TYPE_CHECKING: + from synapse.app.homeserver import HomeServer + logger = logging.getLogger(__name__) class RoomMemberWorkerHandler(RoomMemberHandler): - def __init__(self, hs): + def __init__(self, hs: "HomeServer"): super().__init__(hs) self._remote_join_client = ReplRemoteJoin.make_client(hs) @@ -83,3 +86,6 @@ class RoomMemberWorkerHandler(RoomMemberHandler): await self._notify_change_client( user_id=target.to_string(), room_id=room_id, change="left" ) + + async def forget(self, target: UserID, room_id: str) -> None: + raise RuntimeError("Cannot forget rooms on workers.") diff --git a/synapse/server.py b/synapse/server.py index 48ac87a12..dd4ee7dd3 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -96,7 +96,7 @@ from synapse.handlers.room import ( RoomShutdownHandler, ) from synapse.handlers.room_list import RoomListHandler -from synapse.handlers.room_member import RoomMemberMasterHandler +from synapse.handlers.room_member import RoomMemberHandler, RoomMemberMasterHandler from synapse.handlers.room_member_worker import RoomMemberWorkerHandler from synapse.handlers.search import SearchHandler from synapse.handlers.set_password import SetPasswordHandler @@ -630,7 +630,7 @@ class HomeServer(metaclass=abc.ABCMeta): return ThirdPartyEventRules(self) @cache_in_self - def get_room_member_handler(self): + def get_room_member_handler(self) -> RoomMemberHandler: if self.config.worker_app: return RoomMemberWorkerHandler(self) return RoomMemberMasterHandler(self) From 567f88f835a55d2241cc129ac44b8b0dcedfa6e2 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Wed, 17 Mar 2021 12:33:18 +0000 Subject: [PATCH 05/52] Prep work for removing `outlier` from `internal_metadata` (#9411) * Populate `internal_metadata.outlier` based on `events` table Rather than relying on `outlier` being in the `internal_metadata` column, populate it based on 
the `events.outlier` column. * Move `outlier` out of InternalMetadata._dict Ultimately, this will allow us to stop writing it to the database. For now, we have to grandfather it back in so as to maintain compatibility with older versions of Synapse. --- changelog.d/9411.misc | 1 + synapse/events/__init__.py | 9 ++++++--- synapse/events/utils.py | 2 ++ synapse/replication/http/federation.py | 3 +++ synapse/replication/http/send_event.py | 4 +++- synapse/storage/databases/main/events.py | 19 +++++++++++++++++-- .../storage/databases/main/events_worker.py | 5 ++++- 7 files changed, 36 insertions(+), 7 deletions(-) create mode 100644 changelog.d/9411.misc diff --git a/changelog.d/9411.misc b/changelog.d/9411.misc new file mode 100644 index 000000000..c3e6cfa5f --- /dev/null +++ b/changelog.d/9411.misc @@ -0,0 +1 @@ +Preparatory steps for removing redundant `outlier` data from `event_json.internal_metadata` column. diff --git a/synapse/events/__init__.py b/synapse/events/__init__.py index 3ec4120f8..8f6b955d1 100644 --- a/synapse/events/__init__.py +++ b/synapse/events/__init__.py @@ -98,7 +98,7 @@ class DefaultDictProperty(DictProperty): class _EventInternalMetadata: - __slots__ = ["_dict", "stream_ordering"] + __slots__ = ["_dict", "stream_ordering", "outlier"] def __init__(self, internal_metadata_dict: JsonDict): # we have to copy the dict, because it turns out that the same dict is @@ -108,7 +108,10 @@ class _EventInternalMetadata: # the stream ordering of this event. None, until it has been persisted. self.stream_ordering = None # type: Optional[int] - outlier = DictProperty("outlier") # type: bool + # whether this event is an outlier (ie, whether we have the state at that point + # in the DAG) + self.outlier = False + out_of_band_membership = DictProperty("out_of_band_membership") # type: bool send_on_behalf_of = DictProperty("send_on_behalf_of") # type: str recheck_redaction = DictProperty("recheck_redaction") # type: bool @@ -129,7 +132,7 @@ class _EventInternalMetadata: return dict(self._dict) def is_outlier(self) -> bool: - return self._dict.get("outlier", False) + return self.outlier def is_out_of_band_membership(self) -> bool: """Whether this is an out of band membership, like an invite or an invite diff --git a/synapse/events/utils.py b/synapse/events/utils.py index 7ca5c9940..5022e0fcb 100644 --- a/synapse/events/utils.py +++ b/synapse/events/utils.py @@ -54,6 +54,8 @@ def prune_event(event: EventBase) -> EventBase: event.internal_metadata.stream_ordering ) + pruned_event.internal_metadata.outlier = event.internal_metadata.outlier + # Mark the event as redacted pruned_event.internal_metadata.redacted = True diff --git a/synapse/replication/http/federation.py b/synapse/replication/http/federation.py index 8af53b4f2..82ea3b895 100644 --- a/synapse/replication/http/federation.py +++ b/synapse/replication/http/federation.py @@ -40,6 +40,7 @@ class ReplicationFederationSendEventsRestServlet(ReplicationEndpoint): // containing the event "event_format_version": .., // 1,2,3 etc: the event format version "internal_metadata": { .. serialized internal_metadata .. }, + "outlier": true|false, "rejected_reason": .., // The event.rejected_reason field "context": { .. serialized event context .. 
}, }], @@ -84,6 +85,7 @@ class ReplicationFederationSendEventsRestServlet(ReplicationEndpoint): "room_version": event.room_version.identifier, "event_format_version": event.format_version, "internal_metadata": event.internal_metadata.get_dict(), + "outlier": event.internal_metadata.is_outlier(), "rejected_reason": event.rejected_reason, "context": serialized_context, } @@ -116,6 +118,7 @@ class ReplicationFederationSendEventsRestServlet(ReplicationEndpoint): event = make_event_from_dict( event_dict, room_ver, internal_metadata, rejected_reason ) + event.internal_metadata.outlier = event_payload["outlier"] context = EventContext.deserialize( self.storage, event_payload["context"] diff --git a/synapse/replication/http/send_event.py b/synapse/replication/http/send_event.py index 8fa104c8d..a4c5b4429 100644 --- a/synapse/replication/http/send_event.py +++ b/synapse/replication/http/send_event.py @@ -40,6 +40,7 @@ class ReplicationSendEventRestServlet(ReplicationEndpoint): // containing the event "event_format_version": .., // 1,2,3 etc: the event format version "internal_metadata": { .. serialized internal_metadata .. }, + "outlier": true|false, "rejected_reason": .., // The event.rejected_reason field "context": { .. serialized event context .. }, "requester": { .. serialized requester .. }, @@ -79,7 +80,6 @@ class ReplicationSendEventRestServlet(ReplicationEndpoint): ratelimit (bool) extra_users (list(UserID)): Any extra users to notify about event """ - serialized_context = await context.serialize(event, store) payload = { @@ -87,6 +87,7 @@ class ReplicationSendEventRestServlet(ReplicationEndpoint): "room_version": event.room_version.identifier, "event_format_version": event.format_version, "internal_metadata": event.internal_metadata.get_dict(), + "outlier": event.internal_metadata.is_outlier(), "rejected_reason": event.rejected_reason, "context": serialized_context, "requester": requester.serialize(), @@ -108,6 +109,7 @@ class ReplicationSendEventRestServlet(ReplicationEndpoint): event = make_event_from_dict( event_dict, room_ver, internal_metadata, rejected_reason ) + event.internal_metadata.outlier = content["outlier"] requester = Requester.deserialize(self.store, content["requester"]) context = EventContext.deserialize(self.storage, content["context"]) diff --git a/synapse/storage/databases/main/events.py b/synapse/storage/databases/main/events.py index cd1ceac50..98dac19a9 100644 --- a/synapse/storage/databases/main/events.py +++ b/synapse/storage/databases/main/events.py @@ -1270,8 +1270,10 @@ class PersistEventsStore: logger.exception("") raise + # update the stored internal_metadata to update the "outlier" flag. + # TODO: This is unused as of Synapse 1.31. Remove it once we are happy + # to drop backwards-compatibility with 1.30. metadata_json = json_encoder.encode(event.internal_metadata.get_dict()) - sql = "UPDATE event_json SET internal_metadata = ? WHERE event_id = ?" txn.execute(sql, (metadata_json, event.event_id)) @@ -1319,6 +1321,19 @@ class PersistEventsStore: d.pop("redacted_because", None) return d + def get_internal_metadata(event): + im = event.internal_metadata.get_dict() + + # temporary hack for database compatibility with Synapse 1.30 and earlier: + # store the `outlier` flag inside the internal_metadata json as well as in + # the `events` table, so that if anyone rolls back to an older Synapse, + # things keep working. 
This can be removed once we are happy to drop support + # for that + if event.internal_metadata.is_outlier(): + im["outlier"] = True + + return im + self.db_pool.simple_insert_many_txn( txn, table="event_json", @@ -1327,7 +1342,7 @@ class PersistEventsStore: "event_id": event.event_id, "room_id": event.room_id, "internal_metadata": json_encoder.encode( - event.internal_metadata.get_dict() + get_internal_metadata(event) ), "json": json_encoder.encode(event_dict(event)), "format_version": event.format_version, diff --git a/synapse/storage/databases/main/events_worker.py b/synapse/storage/databases/main/events_worker.py index c04e162cc..952d4969b 100644 --- a/synapse/storage/databases/main/events_worker.py +++ b/synapse/storage/databases/main/events_worker.py @@ -799,6 +799,7 @@ class EventsWorkerStore(SQLBaseStore): rejected_reason=rejected_reason, ) original_ev.internal_metadata.stream_ordering = row["stream_ordering"] + original_ev.internal_metadata.outlier = row["outlier"] event_map[event_id] = original_ev @@ -905,7 +906,8 @@ class EventsWorkerStore(SQLBaseStore): ej.json, ej.format_version, r.room_version, - rej.reason + rej.reason, + e.outlier FROM events AS e JOIN event_json AS ej USING (event_id) LEFT JOIN rooms r ON r.room_id = e.room_id @@ -929,6 +931,7 @@ class EventsWorkerStore(SQLBaseStore): "room_version_id": row[5], "rejected_reason": row[6], "redactions": [], + "outlier": row[7], } # check for redactions From ad721fc559b4af6140852adf58c93ae6ab0bf6b5 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 17 Mar 2021 13:20:08 +0000 Subject: [PATCH 06/52] Fix bad naming of storage function (#9637) We had two functions named `get_forward_extremities_for_room` and `get_forward_extremeties_for_room` that took different parameters. We rename one of them to avoid confusion. --- changelog.d/9637.misc | 1 + synapse/handlers/device.py | 2 +- synapse/handlers/sync.py | 6 ++++-- synapse/storage/databases/main/event_federation.py | 2 +- 4 files changed, 7 insertions(+), 4 deletions(-) create mode 100644 changelog.d/9637.misc diff --git a/changelog.d/9637.misc b/changelog.d/9637.misc new file mode 100644 index 000000000..90a27d9f8 --- /dev/null +++ b/changelog.d/9637.misc @@ -0,0 +1 @@ +Rename storage function to fix spelling and not conflict with another function's name. diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index df3cdc8fb..6aa3f73ee 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -166,7 +166,7 @@ class DeviceWorkerHandler(BaseHandler): # Fetch the current state at the time.
try: - event_ids = await self.store.get_forward_extremeties_for_room( + event_ids = await self.store.get_forward_extremities_for_room_at_stream_ordering( room_id, stream_ordering=stream_ordering ) except errors.StoreError: diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index f50257cd5..7b723ead5 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -1979,8 +1979,10 @@ class SyncHandler: logger.info("User joined room after current token: %s", room_id) - extrems = await self.store.get_forward_extremeties_for_room( - room_id, event_pos.stream + extrems = ( + await self.store.get_forward_extremities_for_room_at_stream_ordering( + room_id, event_pos.stream + ) ) users_in_room = await self.state.get_current_users_in_room(room_id, extrems) if user_id in users_in_room: diff --git a/synapse/storage/databases/main/event_federation.py b/synapse/storage/databases/main/event_federation.py index 332193ad1..a956be491 100644 --- a/synapse/storage/databases/main/event_federation.py +++ b/synapse/storage/databases/main/event_federation.py @@ -793,7 +793,7 @@ class EventFederationWorkerStore(EventsWorkerStore, SignatureWorkerStore, SQLBas return int(min_depth) if min_depth is not None else None - async def get_forward_extremeties_for_room( + async def get_forward_extremities_for_room_at_stream_ordering( self, room_id: str, stream_ordering: int ) -> List[str]: """For a given room_id and stream_ordering, return the forward From 73dbce55232b5e827dc59a3d0dee075d82162bf7 Mon Sep 17 00:00:00 2001 From: Hubert Chathi Date: Wed, 17 Mar 2021 11:04:57 -0400 Subject: [PATCH 07/52] only save remote cross-signing keys if they're different from the current ones (#9634) Co-authored-by: Patrick Cloke --- changelog.d/9634.misc | 1 + synapse/handlers/device.py | 22 ++++++++++++++++++---- 2 files changed, 19 insertions(+), 4 deletions(-) create mode 100644 changelog.d/9634.misc diff --git a/changelog.d/9634.misc b/changelog.d/9634.misc new file mode 100644 index 000000000..59ac42cb8 --- /dev/null +++ b/changelog.d/9634.misc @@ -0,0 +1 @@ +Only save remote cross-signing and device keys if they're different from the current ones. diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index 6aa3f73ee..2fc4951df 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -907,6 +907,7 @@ class DeviceListUpdater: master_key = result.get("master_key") self_signing_key = result.get("self_signing_key") + ignore_devices = False # If the remote server has more than ~1000 devices for this user # we assume that something is going horribly wrong (e.g. a bot # that logs in and creates a new device every time it tries to @@ -925,6 +926,12 @@ class DeviceListUpdater: len(devices), ) devices = [] + ignore_devices = True + else: + cached_devices = await self.store.get_cached_devices_for_user(user_id) + if cached_devices == {d["device_id"]: d for d in devices}: + devices = [] + ignore_devices = True for device in devices: logger.debug( @@ -934,7 +941,10 @@ class DeviceListUpdater: stream_id, ) - await self.store.update_remote_device_list_cache(user_id, devices, stream_id) + if not ignore_devices: + await self.store.update_remote_device_list_cache( + user_id, devices, stream_id + ) device_ids = [device["device_id"] for device in devices] # Handle cross-signing keys. 
@@ -945,7 +955,8 @@ class DeviceListUpdater: ) device_ids = device_ids + cross_signing_device_ids - await self.device_handler.notify_device_update(user_id, device_ids) + if device_ids: + await self.device_handler.notify_device_update(user_id, device_ids) # We clobber the seen updates since we've re-synced from a given # point. @@ -973,14 +984,17 @@ class DeviceListUpdater: """ device_ids = [] - if master_key: + current_keys_map = await self.store.get_e2e_cross_signing_keys_bulk([user_id]) + current_keys = current_keys_map.get(user_id) or {} + + if master_key and master_key != current_keys.get("master"): await self.store.set_e2e_cross_signing_key(user_id, "master", master_key) _, verify_key = get_verify_key_from_cross_signing_key(master_key) # verify_key is a VerifyKey from signedjson, which uses # .version to denote the portion of the key ID after the # algorithm and colon, which is the device ID device_ids.append(verify_key.version) - if self_signing_key: + if self_signing_key and self_signing_key != current_keys.get("self_signing"): await self.store.set_e2e_cross_signing_key( user_id, "self_signing", self_signing_key ) From cc324d53fe531d002aca28a9d8e5b85768cdef23 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Wed, 17 Mar 2021 11:30:21 -0400 Subject: [PATCH 08/52] Fix up types for the typing handler. (#9638) By splitting this into two separate methods, the callers know what methods they can expect on the handler. --- changelog.d/9638.misc | 1 + synapse/replication/tcp/streams/_base.py | 17 ++++++++++------- synapse/rest/client/v1/room.py | 15 +++++++++------ synapse/server.py | 11 ++++++++++- 4 files changed, 30 insertions(+), 14 deletions(-) create mode 100644 changelog.d/9638.misc diff --git a/changelog.d/9638.misc b/changelog.d/9638.misc new file mode 100644 index 000000000..35338cd33 --- /dev/null +++ b/changelog.d/9638.misc @@ -0,0 +1 @@ +Add additional type hints to the Homeserver object.
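A minimal sketch of the calling pattern this split enables (illustrative only, not part of the patch; it assumes a `hs: HomeServer` wired up as in the diff below):

    # Reads of typing state are safe on any instance, workers included:
    typing_reader = hs.get_typing_handler()          # FollowerTypingHandler
    token = typing_reader.get_current_token()

    # Writes must happen on the configured typing writer; on any other
    # instance this accessor raises instead of returning a handler that
    # cannot write:
    typing_writer = hs.get_typing_writer_handler()   # TypingWriterHandler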
diff --git a/synapse/replication/tcp/streams/_base.py b/synapse/replication/tcp/streams/_base.py index f45e7a8c8..7e8e64d61 100644 --- a/synapse/replication/tcp/streams/_base.py +++ b/synapse/replication/tcp/streams/_base.py @@ -33,7 +33,7 @@ import attr from synapse.replication.http.streams import ReplicationGetStreamUpdates if TYPE_CHECKING: - import synapse.server + from synapse.app.homeserver import HomeServer logger = logging.getLogger(__name__) @@ -299,20 +299,23 @@ class TypingStream(Stream): NAME = "typing" ROW_TYPE = TypingStreamRow - def __init__(self, hs): - typing_handler = hs.get_typing_handler() - + def __init__(self, hs: "HomeServer"): writer_instance = hs.config.worker.writers.typing if writer_instance == hs.get_instance_name(): # On the writer, query the typing handler - update_function = typing_handler.get_all_typing_updates + typing_writer_handler = hs.get_typing_writer_handler() + update_function = ( + typing_writer_handler.get_all_typing_updates + ) # type: Callable[[str, int, int, int], Awaitable[Tuple[List[Tuple[int, Any]], int, bool]]] + current_token_function = typing_writer_handler.get_current_token else: # Query the typing writer process update_function = make_http_update_function(hs, self.NAME) + current_token_function = hs.get_typing_handler().get_current_token super().__init__( hs.get_instance_name(), - current_token_without_instance(typing_handler.get_current_token), + current_token_without_instance(current_token_function), update_function, ) @@ -509,7 +512,7 @@ class AccountDataStream(Stream): NAME = "account_data" ROW_TYPE = AccountDataStreamRow - def __init__(self, hs: "synapse.server.HomeServer"): + def __init__(self, hs: "HomeServer"): self.store = hs.get_datastore() super().__init__( hs.get_instance_name(), diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py index 5884daea6..e7a8207eb 100644 --- a/synapse/rest/client/v1/room.py +++ b/synapse/rest/client/v1/room.py @@ -49,7 +49,7 @@ from synapse.util import json_decoder from synapse.util.stringutils import parse_and_validate_server_name, random_string if TYPE_CHECKING: - import synapse.server + from synapse.app.homeserver import HomeServer logger = logging.getLogger(__name__) @@ -846,10 +846,10 @@ class RoomTypingRestServlet(RestServlet): "/rooms/(?P[^/]*)/typing/(?P[^/]*)$", v1=True ) - def __init__(self, hs): + def __init__(self, hs: "HomeServer"): super().__init__() + self.hs = hs self.presence_handler = hs.get_presence_handler() - self.typing_handler = hs.get_typing_handler() self.auth = hs.get_auth() # If we're not on the typing writer instance we should scream if we get @@ -874,16 +874,19 @@ class RoomTypingRestServlet(RestServlet): # Limit timeout to stop people from setting silly typing timeouts. timeout = min(content.get("timeout", 30000), 120000) + # Defer getting the typing handler since it will raise on workers. 
+ typing_handler = self.hs.get_typing_writer_handler() + try: if content["typing"]: - await self.typing_handler.started_typing( + await typing_handler.started_typing( target_user=target_user, requester=requester, room_id=room_id, timeout=timeout, ) else: - await self.typing_handler.stopped_typing( + await typing_handler.stopped_typing( target_user=target_user, requester=requester, room_id=room_id ) except ShadowBanError: @@ -901,7 +904,7 @@ class RoomAliasListServlet(RestServlet): ), ] - def __init__(self, hs: "synapse.server.HomeServer"): + def __init__(self, hs: "HomeServer"): super().__init__() self.auth = hs.get_auth() self.directory_handler = hs.get_directory_handler() diff --git a/synapse/server.py b/synapse/server.py index dd4ee7dd3..d11d08c57 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -417,9 +417,18 @@ class HomeServer(metaclass=abc.ABCMeta): return PresenceHandler(self) @cache_in_self - def get_typing_handler(self): + def get_typing_writer_handler(self) -> TypingWriterHandler: if self.config.worker.writers.typing == self.get_instance_name(): return TypingWriterHandler(self) + else: + raise Exception("Workers cannot write typing") + + @cache_in_self + def get_typing_handler(self) -> FollowerTypingHandler: + if self.config.worker.writers.typing == self.get_instance_name(): + # Use get_typing_writer_handler to ensure that we use the same + # cached version. + return self.get_typing_writer_handler() else: return FollowerTypingHandler(self) From 7b06f85c0e18b62775f12789fdf4adb6a0a47a4b Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Wed, 17 Mar 2021 16:51:55 +0000 Subject: [PATCH 09/52] Ensure we use a copy of the event content dict before modifying it in serialize_event (#9585) This bug was discovered by DINUM. We were modifying `serialized_event["content"]`, which - if you've got `USE_FROZEN_DICTS` turned on or are [using a third party rules module](https://github.com/matrix-org/synapse/blob/17cd48fe5171d50da4cb59db647b993168e7dfab/synapse/events/third_party_rules.py#L73-L76) - will raise a 500 if you try to edit a reply to a message. `serialized_event["content"]` could be set to the edit event's content, instead of a copy of it, which is bad as we attempt to modify it. As a result, we also end up modifying the original event's content. DINUM uses a third party rules module, which meant the event's content got frozen and thus an exception was raised. To be clear, the problem is not that the event's content was frozen. In fact doing so helped us uncover the fact we weren't copying event content correctly. --- changelog.d/9585.bugfix | 1 + synapse/events/utils.py | 14 ++++- tests/rest/client/test_third_party_rules.py | 62 ++++++++++++++++++++ tests/rest/client/v2_alpha/test_relations.py | 62 ++++++++++++++++++++ tests/unittest.py | 10 ++++ 5 files changed, 147 insertions(+), 2 deletions(-) create mode 100644 changelog.d/9585.bugfix diff --git a/changelog.d/9585.bugfix b/changelog.d/9585.bugfix new file mode 100644 index 000000000..de472ddfd --- /dev/null +++ b/changelog.d/9585.bugfix @@ -0,0 +1 @@ +Fix a longstanding bug that could cause issues when editing a reply to a message.
\ No newline at end of file diff --git a/synapse/events/utils.py b/synapse/events/utils.py index 5022e0fcb..0f8a3b5ad 100644 --- a/synapse/events/utils.py +++ b/synapse/events/utils.py @@ -22,6 +22,7 @@ from synapse.api.constants import EventTypes, RelationTypes from synapse.api.errors import Codes, SynapseError from synapse.api.room_versions import RoomVersion from synapse.util.async_helpers import yieldable_gather_results +from synapse.util.frozenutils import unfreeze from . import EventBase @@ -402,10 +403,19 @@ class EventClientSerializer: # If there is an edit replace the content, preserving existing # relations. + # Ensure we take copies of the edit content, otherwise we risk modifying + # the original event. + edit_content = edit.content.copy() + + # Unfreeze the event content if necessary, so that we may modify it below + edit_content = unfreeze(edit_content) + serialized_event["content"] = edit_content.get("m.new_content", {}) + + # Check for existing relations relations = event.content.get("m.relates_to") - serialized_event["content"] = edit.content.get("m.new_content", {}) if relations: - serialized_event["content"]["m.relates_to"] = relations + # Keep the relations, ensuring we use a dict copy of the original + serialized_event["content"]["m.relates_to"] = relations.copy() else: serialized_event["content"].pop("m.relates_to", None) diff --git a/tests/rest/client/test_third_party_rules.py b/tests/rest/client/test_third_party_rules.py index 227fffab5..bf3901427 100644 --- a/tests/rest/client/test_third_party_rules.py +++ b/tests/rest/client/test_third_party_rules.py @@ -161,6 +161,68 @@ class ThirdPartyRulesTestCase(unittest.HomeserverTestCase): ev = channel.json_body self.assertEqual(ev["content"]["x"], "y") + def test_message_edit(self): + """Ensure that the module doesn't cause issues with edited messages.""" + # first patch the event checker so that it will modify the event + async def check(ev: EventBase, state): + d = ev.get_dict() + d["content"] = { + "msgtype": "m.text", + "body": d["content"]["body"].upper(), + } + return d + + current_rules_module().check_event_allowed = check + + # Send an event, then edit it. + channel = self.make_request( + "PUT", + "/_matrix/client/r0/rooms/%s/send/modifyme/1" % self.room_id, + { + "msgtype": "m.text", + "body": "Original body", + }, + access_token=self.tok, + ) + self.assertEqual(channel.result["code"], b"200", channel.result) + orig_event_id = channel.json_body["event_id"] + + channel = self.make_request( + "PUT", + "/_matrix/client/r0/rooms/%s/send/m.room.message/2" % self.room_id, + { + "m.new_content": {"msgtype": "m.text", "body": "Edited body"}, + "m.relates_to": { + "rel_type": "m.replace", + "event_id": orig_event_id, + }, + "msgtype": "m.text", + "body": "Edited body", + }, + access_token=self.tok, + ) + self.assertEqual(channel.result["code"], b"200", channel.result) + edited_event_id = channel.json_body["event_id"] + + # ... 
and check that they both got modified + channel = self.make_request( + "GET", + "/_matrix/client/r0/rooms/%s/event/%s" % (self.room_id, orig_event_id), + access_token=self.tok, + ) + self.assertEqual(channel.result["code"], b"200", channel.result) + ev = channel.json_body + self.assertEqual(ev["content"]["body"], "ORIGINAL BODY") + + channel = self.make_request( + "GET", + "/_matrix/client/r0/rooms/%s/event/%s" % (self.room_id, edited_event_id), + access_token=self.tok, + ) + self.assertEqual(channel.result["code"], b"200", channel.result) + ev = channel.json_body + self.assertEqual(ev["content"]["body"], "EDITED BODY") + def test_send_event(self): """Tests that the module can send an event into a room via the module api""" content = { diff --git a/tests/rest/client/v2_alpha/test_relations.py b/tests/rest/client/v2_alpha/test_relations.py index 7c457754f..e7bb5583f 100644 --- a/tests/rest/client/v2_alpha/test_relations.py +++ b/tests/rest/client/v2_alpha/test_relations.py @@ -39,6 +39,11 @@ class RelationsTestCase(unittest.HomeserverTestCase): # We need to enable msc1849 support for aggregations config = self.default_config() config["experimental_msc1849_support_enabled"] = True + + # We enable frozen dicts as relations/edits change event contents, so we + # want to test that we don't modify the events in the caches. + config["use_frozen_dicts"] = True + return self.setup_test_homeserver(config=config) def prepare(self, reactor, clock, hs): @@ -518,6 +523,63 @@ class RelationsTestCase(unittest.HomeserverTestCase): {"event_id": edit_event_id, "sender": self.user_id}, m_replace_dict ) + def test_edit_reply(self): + """Test that editing a reply works.""" + + # Create a reply to edit. + channel = self._send_relation( + RelationTypes.REFERENCE, + "m.room.message", + content={"msgtype": "m.text", "body": "A reply!"}, + ) + self.assertEquals(200, channel.code, channel.json_body) + reply = channel.json_body["event_id"] + + new_body = {"msgtype": "m.text", "body": "I've been edited!"} + channel = self._send_relation( + RelationTypes.REPLACE, + "m.room.message", + content={"msgtype": "m.text", "body": "foo", "m.new_content": new_body}, + parent_id=reply, + ) + self.assertEquals(200, channel.code, channel.json_body) + + edit_event_id = channel.json_body["event_id"] + + channel = self.make_request( + "GET", + "/rooms/%s/event/%s" % (self.room, reply), + access_token=self.user_token, + ) + self.assertEquals(200, channel.code, channel.json_body) + + # We expect to see the new body in the dict, as well as the reference + # metadata still intact. + self.assertDictContainsSubset(new_body, channel.json_body["content"]) + self.assertDictContainsSubset( + { + "m.relates_to": { + "event_id": self.parent_id, + "key": None, + "rel_type": "m.reference", + } + }, + channel.json_body["content"], + ) + + # We expect that the edit relation appears in the unsigned relations + # section. + relations_dict = channel.json_body["unsigned"].get("m.relations") + self.assertIn(RelationTypes.REPLACE, relations_dict) + + m_replace_dict = relations_dict[RelationTypes.REPLACE] + for key in ["event_id", "sender", "origin_server_ts"]: + self.assertIn(key, m_replace_dict) + + self.assert_dict( + {"event_id": edit_event_id, "sender": self.user_id}, m_replace_dict + ) + def test_relations_redaction_redacts_edits(self): """Test that edits of an event are redacted when the original event is redacted.
diff --git a/tests/unittest.py b/tests/unittest.py index 224f037ce..58a4daa1e 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -32,6 +32,7 @@ from twisted.python.threadpool import ThreadPool from twisted.trial import unittest from twisted.web.resource import Resource +from synapse import events from synapse.api.constants import EventTypes, Membership from synapse.config.homeserver import HomeServerConfig from synapse.config.ratelimiting import FederationRateLimitConfig @@ -229,6 +230,11 @@ class HomeserverTestCase(TestCase): self._hs_args = {"clock": self.clock, "reactor": self.reactor} self.hs = self.make_homeserver(self.reactor, self.clock) + # Honour the `use_frozen_dicts` config option. We have to do this + # manually because this is taken care of in the app `start` code, which + # we don't run. Plus we want to reset it on tearDown. + events.USE_FROZEN_DICTS = self.hs.config.use_frozen_dicts + if self.hs is None: raise Exception("No homeserver returned from make_homeserver.") @@ -292,6 +298,10 @@ class HomeserverTestCase(TestCase): if hasattr(self, "prepare"): self.prepare(self.reactor, self.clock, self.hs) + def tearDown(self): + # Reset to not use frozen dicts. + events.USE_FROZEN_DICTS = False + def wait_on_thread(self, deferred, timeout=10): """ Wait until a Deferred is done, where it's waiting on a real thread. From 405aeb0b2c40443d22ce8c265df18e81bd995b44 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Thu, 18 Mar 2021 16:34:47 +0100 Subject: [PATCH 10/52] Implement MSC3026: busy presence state --- changelog.d/9644.feature | 1 + synapse/api/constants.py | 1 + synapse/app/generic_worker.py | 1 + synapse/handlers/presence.py | 3 ++- synapse/rest/client/versions.py | 2 ++ tests/handlers/test_presence.py | 20 ++++++++++++++++++++ 6 files changed, 27 insertions(+), 1 deletion(-) create mode 100644 changelog.d/9644.feature diff --git a/changelog.d/9644.feature b/changelog.d/9644.feature new file mode 100644 index 000000000..556bcf0f9 --- /dev/null +++ b/changelog.d/9644.feature @@ -0,0 +1 @@ +Implement the busy presence state as described in [MSC3026](https://github.com/matrix-org/matrix-doc/pull/3026). 
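A sketch of what this enables for clients (illustrative only, not part of the patch; the endpoint is the standard presence API, and the user ID and token are placeholders). A client supporting MSC3026 could mark itself busy with:

    PUT /_matrix/client/r0/presence/@alice:example.org/status
    Authorization: Bearer <access_token>

    {"presence": "org.matrix.msc3026.busy", "status_msg": "On a call"}

As the handler and test changes below show, busy is treated like online for the purpose of refreshing `last_active_ts`, and an idle busy user is not demoted to `unavailable`.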
diff --git a/synapse/api/constants.py b/synapse/api/constants.py index 691f8f9ad..cc8541bc1 100644 --- a/synapse/api/constants.py +++ b/synapse/api/constants.py @@ -51,6 +51,7 @@ class PresenceState: OFFLINE = "offline" UNAVAILABLE = "unavailable" ONLINE = "online" + BUSY = "org.matrix.msc3026.busy" class JoinRules: diff --git a/synapse/app/generic_worker.py b/synapse/app/generic_worker.py index 274d582d0..236d98a29 100644 --- a/synapse/app/generic_worker.py +++ b/synapse/app/generic_worker.py @@ -439,6 +439,7 @@ class GenericWorkerPresence(BasePresenceHandler): PresenceState.ONLINE, PresenceState.UNAVAILABLE, PresenceState.OFFLINE, + PresenceState.BUSY, ) if presence not in valid_presence: raise SynapseError(400, "Invalid presence state") diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index 54631b4ee..bcb99f627 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -730,6 +730,7 @@ class PresenceHandler(BasePresenceHandler): PresenceState.ONLINE, PresenceState.UNAVAILABLE, PresenceState.OFFLINE, + PresenceState.BUSY, ) if presence not in valid_presence: raise SynapseError(400, "Invalid presence state") @@ -744,7 +745,7 @@ class PresenceHandler(BasePresenceHandler): msg = status_msg if presence != PresenceState.OFFLINE else None new_fields["status_msg"] = msg - if presence == PresenceState.ONLINE: + if presence == PresenceState.ONLINE or presence == PresenceState.BUSY: new_fields["last_active_ts"] = self.clock.time_msec() await self._update_states([prev_state.copy_and_replace(**new_fields)]) diff --git a/synapse/rest/client/versions.py b/synapse/rest/client/versions.py index d24a19931..f387d29b5 100644 --- a/synapse/rest/client/versions.py +++ b/synapse/rest/client/versions.py @@ -81,6 +81,8 @@ class VersionsRestServlet(RestServlet): "io.element.e2ee_forced.public": self.e2ee_forced_public, "io.element.e2ee_forced.private": self.e2ee_forced_private, "io.element.e2ee_forced.trusted_private": self.e2ee_forced_trusted_private, + # Supports the busy presence state described in MSC3026. + "org.matrix.msc3026.busy_presence": True, }, }, ) diff --git a/tests/handlers/test_presence.py b/tests/handlers/test_presence.py index 996c61419..77330f59a 100644 --- a/tests/handlers/test_presence.py +++ b/tests/handlers/test_presence.py @@ -310,6 +310,26 @@ class PresenceTimeoutTestCase(unittest.TestCase): self.assertIsNotNone(new_state) self.assertEquals(new_state.state, PresenceState.UNAVAILABLE) + def test_busy_no_idle(self): + """ + Tests that a user setting their presence to busy but idling doesn't turn their + presence state into unavailable. + """ + user_id = "@foo:bar" + now = 5000000 + + state = UserPresenceState.default(user_id) + state = state.copy_and_replace( + state=PresenceState.BUSY, + last_active_ts=now - IDLE_TIMER - 1, + last_user_sync_ts=now, + ) + + new_state = handle_timeout(state, is_mine=True, syncing_user_ids=set(), now=now) + + self.assertIsNotNone(new_state) + self.assertEquals(new_state.state, PresenceState.BUSY) + def test_sync_timeout(self): user_id = "@foo:bar" now = 5000000 From dd71eb0f8ab5a6e0d8eda3be8c2d5ff01271d147 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 18 Mar 2021 15:52:26 +0000 Subject: [PATCH 11/52] Make federation catchup send last event from any server. (#9640) Currently federation catchup will send the last *local* event that we failed to send to the remote. 
This can cause issues for large rooms where lots of servers have sent events
while the remote server was down, as when it comes back up again it'll be
flooded with events from various points in the DAG.

Instead, let's make it so that all the servers send the most recent events,
even if it's not theirs. The remote should deduplicate the events, so there
shouldn't be much overhead in doing this.

Alternatively, the servers could only send local events if they were also
extremities and hope that the other server will send the event over, but
that is a bit risky.
---
 changelog.d/9640.misc                         |   1 +
 synapse/federation/federation_server.py       |  25 +----
 .../sender/per_destination_queue.py           | 104 +++++++++++++++---
 tests/federation/test_federation_catch_up.py  |  49 +++++++++
 4 files changed, 141 insertions(+), 38 deletions(-)
 create mode 100644 changelog.d/9640.misc

diff --git a/changelog.d/9640.misc b/changelog.d/9640.misc
new file mode 100644
index 000000000..3d410ed4c
--- /dev/null
+++ b/changelog.d/9640.misc
@@ -0,0 +1 @@
+Improve performance of federation catch up by sending the latest events in the room to the remote, rather than just the last event sent by the local server.
diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py
index 9839d3d01..d84e36207 100644
--- a/synapse/federation/federation_server.py
+++ b/synapse/federation/federation_server.py
@@ -35,7 +35,7 @@ from twisted.internet import defer
 from twisted.internet.abstract import isIPAddress
 from twisted.python import failure

-from synapse.api.constants import EduTypes, EventTypes, Membership
+from synapse.api.constants import EduTypes, EventTypes
 from synapse.api.errors import (
     AuthError,
     Codes,
@@ -63,7 +63,7 @@ from synapse.replication.http.federation import (
     ReplicationFederationSendEduRestServlet,
     ReplicationGetQueryRestServlet,
 )
-from synapse.types import JsonDict, get_domain_from_id
+from synapse.types import JsonDict
 from synapse.util import glob_to_regex, json_decoder, unwrapFirstError
 from synapse.util.async_helpers import Linearizer, concurrently_execute
 from synapse.util.caches.response_cache import ResponseCache
@@ -727,27 +727,6 @@ class FederationServer(FederationBase):
             if the event was unacceptable for any other reason (eg, too large,
             too many prev_events, couldn't find the prev_events)
         """
-        # check that it's actually being sent from a valid destination to
-        # workaround bug #1753 in 0.18.5 and 0.18.6
-        if origin != get_domain_from_id(pdu.sender):
-            # We continue to accept join events from any server; this is
-            # necessary for the federation join dance to work correctly.
-            # (When we join over federation, the "helper" server is
-            # responsible for sending out the join event, rather than the
-            # origin. See bug #1893. This is also true for some third party
-            # invites).
-        if not (
-            pdu.type == "m.room.member"
-            and pdu.content
-            and pdu.content.get("membership", None)
-            in (Membership.JOIN, Membership.INVITE)
-        ):
-            logger.info(
-                "Discarding PDU %s from invalid origin %s", pdu.event_id, origin
-            )
-            return
-        else:
-            logger.info("Accepting join PDU %s from %s", pdu.event_id, origin)

         # We've already checked that we know the room version by this point
         room_version = await self.store.get_room_version(pdu.room_id)
diff --git a/synapse/federation/sender/per_destination_queue.py b/synapse/federation/sender/per_destination_queue.py
index cc0d765e5..af85fe0a1 100644
--- a/synapse/federation/sender/per_destination_queue.py
+++ b/synapse/federation/sender/per_destination_queue.py
@@ -15,7 +15,7 @@
 # limitations under the License.
 import datetime
 import logging
-from typing import TYPE_CHECKING, Dict, Hashable, Iterable, List, Optional, Tuple, cast
+from typing import TYPE_CHECKING, Dict, Hashable, Iterable, List, Optional, Tuple

 import attr
 from prometheus_client import Counter
@@ -77,6 +77,7 @@ class PerDestinationQueue:
         self._transaction_manager = transaction_manager
         self._instance_name = hs.get_instance_name()
         self._federation_shard_config = hs.config.worker.federation_shard_config
+        self._state = hs.get_state_handler()

         self._should_send_on_this_instance = True
         if not self._federation_shard_config.should_handle(
@@ -415,22 +416,95 @@ class PerDestinationQueue:
                 "This should not happen." % event_ids
             )

-        if logger.isEnabledFor(logging.INFO):
-            rooms = [p.room_id for p in catchup_pdus]
-            logger.info("Catching up rooms to %s: %r", self._destination, rooms)
+        # We send transactions with events from one room only, as it's likely
+        # that the remote will have to do additional processing, which may
+        # take some time. It's better to give it small amounts of work
+        # rather than risk the request timing out and repeatedly being
+        # retried, and not making any progress.
+        #
+        # Note: `catchup_pdus` will have exactly one PDU per room.
+        for pdu in catchup_pdus:
+            # The PDU from the DB will be the last PDU in the room from
+            # *this server* that wasn't sent to the remote. However, other
+            # servers may have sent lots of events since then, and we want
+            # to try and tell the remote only about the *latest* events in
+            # the room. This is so that it doesn't get inundated by events
+            # from various parts of the DAG, which all need to be processed.
+            #
+            # Note: this does mean that in large rooms a server coming back
+            # online will get sent the same events from all the different
+            # servers, but the remote will correctly deduplicate them and
+            # handle it only once.

-        await self._transaction_manager.send_new_transaction(
-            self._destination, catchup_pdus, []
-        )
+            # Step 1, fetch the current extremities
+            extrems = await self._store.get_prev_events_for_room(pdu.room_id)

-        sent_transactions_counter.inc()
-        final_pdu = catchup_pdus[-1]
-        self._last_successful_stream_ordering = cast(
-            int, final_pdu.internal_metadata.stream_ordering
-        )
-        await self._store.set_destination_last_successful_stream_ordering(
-            self._destination, self._last_successful_stream_ordering
-        )
+            if pdu.event_id in extrems:
+                # If the event is in the extremities, then great! We can just
+                # use that without having to do further checks.
+                room_catchup_pdus = [pdu]
+            else:
+                # If not, fetch the extremities and figure out which we can
+                # send.
+                extrem_events = await self._store.get_events_as_list(extrems)
+
+                new_pdus = []
+                for p in extrem_events:
+                    # We pulled this from the DB, so it'll be non-null
+                    assert p.internal_metadata.stream_ordering
+
+                    # Filter out events that happened before the remote went
+                    # offline
+                    if (
+                        p.internal_metadata.stream_ordering
+                        < self._last_successful_stream_ordering
+                    ):
+                        continue
+
+                    # Filter out events where the server is not in the room,
+                    # e.g. it may have left/been kicked. *Ideally* we'd pull
+                    # out the kick and send that, but it's a rare edge case
+                    # so we don't bother for now (the server that sent the
+                    # kick should send it out if it's online).
+                    hosts = await self._state.get_hosts_in_room_at_events(
+                        p.room_id, [p.event_id]
+                    )
+                    if self._destination not in hosts:
+                        continue
+
+                    new_pdus.append(p)
+
+                # If we've filtered out all the extremities, fall back to
+                # sending the original event. This should ensure that the
+                # server gets at least some of the missed events (especially
+                # if the other sending servers are up).
+                if new_pdus:
+                    room_catchup_pdus = new_pdus
+
+            logger.info(
+                "Catching up rooms to %s: %r", self._destination, pdu.room_id
+            )
+
+            await self._transaction_manager.send_new_transaction(
+                self._destination, room_catchup_pdus, []
+            )
+
+            sent_transactions_counter.inc()
+
+            # We pulled this from the DB, so it'll be non-null
+            assert pdu.internal_metadata.stream_ordering
+
+            # Note that we mark the last successful stream ordering as that
+            # from the *original* PDU, rather than the PDU(s) we actually
+            # send. This is because we use it to mark our position in the
+            # queue of missed PDUs to process.
+            self._last_successful_stream_ordering = (
+                pdu.internal_metadata.stream_ordering
+            )
+
+            await self._store.set_destination_last_successful_stream_ordering(
+                self._destination, self._last_successful_stream_ordering
+            )

     def _get_rr_edus(self, force_flush: bool) -> Iterable[Edu]:
         if not self._pending_rrs:
diff --git a/tests/federation/test_federation_catch_up.py b/tests/federation/test_federation_catch_up.py
index 6f96cd794..95eac6a5a 100644
--- a/tests/federation/test_federation_catch_up.py
+++ b/tests/federation/test_federation_catch_up.py
@@ -2,6 +2,7 @@ from typing import List, Tuple

 from mock import Mock

+from synapse.api.constants import EventTypes
 from synapse.events import EventBase
 from synapse.federation.sender import PerDestinationQueue, TransactionManager
 from synapse.federation.units import Edu
@@ -421,3 +422,51 @@ class FederationCatchUpTestCases(FederatingHomeserverTestCase):
         self.assertNotIn("zzzerver", woken)
         # - all destinations are woken exactly once; they appear once in woken.
         self.assertCountEqual(woken, server_names[:-1])
+
+    @override_config({"send_federation": True})
+    def test_not_latest_event(self):
+        """Test that we send the latest event in the room even if it's not ours."""
+
+        per_dest_queue, sent_pdus = self.make_fake_destination_queue()
+
+        # Make a room with a local user, and two servers. One will go offline
+        # and one will send some events.
+        self.register_user("u1", "you the one")
+        u1_token = self.login("u1", "you the one")
+        room_1 = self.helper.create_room_as("u1", tok=u1_token)
+
+        self.get_success(
+            event_injection.inject_member_event(self.hs, room_1, "@user:host2", "join")
+        )
+        event_1 = self.get_success(
+            event_injection.inject_member_event(self.hs, room_1, "@user:host3", "join")
+        )
+
+        # First we send something from the local server, so that we notice the
+        # remote is down and go into catchup mode.
+        self.helper.send(room_1, "you hear me!!", tok=u1_token)
+
+        # Now simulate us receiving an event from the still online remote.
+        event_2 = self.get_success(
+            event_injection.inject_event(
+                self.hs,
+                type=EventTypes.Message,
+                sender="@user:host3",
+                room_id=room_1,
+                content={"msgtype": "m.text", "body": "Hello"},
+            )
+        )
+
+        self.get_success(
+            self.hs.get_datastore().set_destination_last_successful_stream_ordering(
+                "host2", event_1.internal_metadata.stream_ordering
+            )
+        )
+
+        self.get_success(per_dest_queue._catch_up_transmission_loop())
+
+        # We expect only the last message from the remote, event_2, to have been
+        # sent, rather than the last *local* event that was sent.
+        self.assertEqual(len(sent_pdus), 1)
+        self.assertEqual(sent_pdus[0].event_id, event_2.event_id)
+        self.assertFalse(per_dest_queue._catching_up)

From 8dd2ea65a9566fd0850df0d989f700f61b490ed9 Mon Sep 17 00:00:00 2001
From: Dirk Klimpel <5740567+dklimpel@users.noreply.github.com>
Date: Thu, 18 Mar 2021 17:54:08 +0100
Subject: [PATCH 12/52] Consistently check whether a password may be set for a
 user. (#9636)

---
 changelog.d/9636.bugfix                        |   1 +
 synapse/handlers/set_password.py               |   2 +-
 synapse/rest/admin/users.py                    |   2 +-
 .../storage/databases/main/registration.py     |   1 +
 tests/rest/admin/test_user.py                  | 195 ++++++++++++------
 5 files changed, 133 insertions(+), 68 deletions(-)
 create mode 100644 changelog.d/9636.bugfix

diff --git a/changelog.d/9636.bugfix b/changelog.d/9636.bugfix
new file mode 100644
index 000000000..fa772ed6f
--- /dev/null
+++ b/changelog.d/9636.bugfix
@@ -0,0 +1 @@
+Check whether a password may be set before setting one for the user.
\ No newline at end of file
diff --git a/synapse/handlers/set_password.py b/synapse/handlers/set_password.py
index 84af2dde7..04e7c64c9 100644
--- a/synapse/handlers/set_password.py
+++ b/synapse/handlers/set_password.py
@@ -41,7 +41,7 @@ class SetPasswordHandler(BaseHandler):
         logout_devices: bool,
         requester: Optional[Requester] = None,
     ) -> None:
-        if not self.hs.config.password_localdb_enabled:
+        if not self._auth_handler.can_change_password():
             raise SynapseError(403, "Password change disabled", errcode=Codes.FORBIDDEN)

         try:
diff --git a/synapse/rest/admin/users.py b/synapse/rest/admin/users.py
index 2c89b62e2..aaa56a702 100644
--- a/synapse/rest/admin/users.py
+++ b/synapse/rest/admin/users.py
@@ -271,7 +271,7 @@ class UserRestServletV2(RestServlet):
             elif not deactivate and user["deactivated"]:
                 if (
                     "password" not in body
-                    and self.hs.config.password_localdb_enabled
+                    and self.auth_handler.can_change_password()
                 ):
                     raise SynapseError(
                         400, "Must provide a password to re-activate an account."
diff --git a/synapse/storage/databases/main/registration.py b/synapse/storage/databases/main/registration.py index eba66ff35..90a8f664e 100644 --- a/synapse/storage/databases/main/registration.py +++ b/synapse/storage/databases/main/registration.py @@ -1210,6 +1210,7 @@ class RegistrationBackgroundUpdateStore(RegistrationWorkerStore): self._invalidate_cache_and_stream( txn, self.get_user_deactivated_status, (user_id,) ) + self._invalidate_cache_and_stream(txn, self.get_user_by_id, (user_id,)) txn.call_after(self.is_guest.invalidate, (user_id,)) @cached() diff --git a/tests/rest/admin/test_user.py b/tests/rest/admin/test_user.py index e58d5cf0d..cf61f284c 100644 --- a/tests/rest/admin/test_user.py +++ b/tests/rest/admin/test_user.py @@ -1003,12 +1003,23 @@ class UserRestTestCase(unittest.HomeserverTestCase): def prepare(self, reactor, clock, hs): self.store = hs.get_datastore() + self.auth_handler = hs.get_auth_handler() + # create users and get access tokens + # regardless of whether password login or SSO is allowed self.admin_user = self.register_user("admin", "pass", admin=True) - self.admin_user_tok = self.login("admin", "pass") + self.admin_user_tok = self.get_success( + self.auth_handler.get_access_token_for_user_id( + self.admin_user, device_id=None, valid_until_ms=None + ) + ) self.other_user = self.register_user("user", "pass", displayname="User") - self.other_user_token = self.login("user", "pass") + self.other_user_token = self.get_success( + self.auth_handler.get_access_token_for_user_id( + self.other_user, device_id=None, valid_until_ms=None + ) + ) self.url_other_user = "/_synapse/admin/v2/users/%s" % urllib.parse.quote( self.other_user ) @@ -1081,7 +1092,7 @@ class UserRestTestCase(unittest.HomeserverTestCase): self.assertEqual("Bob's name", channel.json_body["displayname"]) self.assertEqual("email", channel.json_body["threepids"][0]["medium"]) self.assertEqual("bob@bob.bob", channel.json_body["threepids"][0]["address"]) - self.assertEqual(True, channel.json_body["admin"]) + self.assertTrue(channel.json_body["admin"]) self.assertEqual("mxc://fibble/wibble", channel.json_body["avatar_url"]) # Get user @@ -1096,9 +1107,9 @@ class UserRestTestCase(unittest.HomeserverTestCase): self.assertEqual("Bob's name", channel.json_body["displayname"]) self.assertEqual("email", channel.json_body["threepids"][0]["medium"]) self.assertEqual("bob@bob.bob", channel.json_body["threepids"][0]["address"]) - self.assertEqual(True, channel.json_body["admin"]) - self.assertEqual(False, channel.json_body["is_guest"]) - self.assertEqual(False, channel.json_body["deactivated"]) + self.assertTrue(channel.json_body["admin"]) + self.assertFalse(channel.json_body["is_guest"]) + self.assertFalse(channel.json_body["deactivated"]) self.assertEqual("mxc://fibble/wibble", channel.json_body["avatar_url"]) def test_create_user(self): @@ -1130,7 +1141,7 @@ class UserRestTestCase(unittest.HomeserverTestCase): self.assertEqual("Bob's name", channel.json_body["displayname"]) self.assertEqual("email", channel.json_body["threepids"][0]["medium"]) self.assertEqual("bob@bob.bob", channel.json_body["threepids"][0]["address"]) - self.assertEqual(False, channel.json_body["admin"]) + self.assertFalse(channel.json_body["admin"]) self.assertEqual("mxc://fibble/wibble", channel.json_body["avatar_url"]) # Get user @@ -1145,10 +1156,10 @@ class UserRestTestCase(unittest.HomeserverTestCase): self.assertEqual("Bob's name", channel.json_body["displayname"]) self.assertEqual("email", channel.json_body["threepids"][0]["medium"]) 
self.assertEqual("bob@bob.bob", channel.json_body["threepids"][0]["address"]) - self.assertEqual(False, channel.json_body["admin"]) - self.assertEqual(False, channel.json_body["is_guest"]) - self.assertEqual(False, channel.json_body["deactivated"]) - self.assertEqual(False, channel.json_body["shadow_banned"]) + self.assertFalse(channel.json_body["admin"]) + self.assertFalse(channel.json_body["is_guest"]) + self.assertFalse(channel.json_body["deactivated"]) + self.assertFalse(channel.json_body["shadow_banned"]) self.assertEqual("mxc://fibble/wibble", channel.json_body["avatar_url"]) @override_config( @@ -1197,7 +1208,7 @@ class UserRestTestCase(unittest.HomeserverTestCase): self.assertEqual(201, int(channel.result["code"]), msg=channel.result["body"]) self.assertEqual("@bob:test", channel.json_body["name"]) - self.assertEqual(False, channel.json_body["admin"]) + self.assertFalse(channel.json_body["admin"]) @override_config( {"limit_usage_by_mau": True, "max_mau_value": 2, "mau_trial_days": 0} @@ -1237,7 +1248,7 @@ class UserRestTestCase(unittest.HomeserverTestCase): # Admin user is not blocked by mau anymore self.assertEqual(201, int(channel.result["code"]), msg=channel.result["body"]) self.assertEqual("@bob:test", channel.json_body["name"]) - self.assertEqual(False, channel.json_body["admin"]) + self.assertFalse(channel.json_body["admin"]) @override_config( { @@ -1429,24 +1440,23 @@ class UserRestTestCase(unittest.HomeserverTestCase): self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) self.assertEqual("@user:test", channel.json_body["name"]) - self.assertEqual(False, channel.json_body["deactivated"]) + self.assertFalse(channel.json_body["deactivated"]) self.assertEqual("foo@bar.com", channel.json_body["threepids"][0]["address"]) self.assertEqual("mxc://servername/mediaid", channel.json_body["avatar_url"]) self.assertEqual("User", channel.json_body["displayname"]) # Deactivate user - body = json.dumps({"deactivated": True}) - channel = self.make_request( "PUT", self.url_other_user, access_token=self.admin_user_tok, - content=body.encode(encoding="utf_8"), + content={"deactivated": True}, ) self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) self.assertEqual("@user:test", channel.json_body["name"]) - self.assertEqual(True, channel.json_body["deactivated"]) + self.assertTrue(channel.json_body["deactivated"]) + self.assertIsNone(channel.json_body["password_hash"]) self.assertEqual(0, len(channel.json_body["threepids"])) self.assertEqual("mxc://servername/mediaid", channel.json_body["avatar_url"]) self.assertEqual("User", channel.json_body["displayname"]) @@ -1461,7 +1471,8 @@ class UserRestTestCase(unittest.HomeserverTestCase): self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) self.assertEqual("@user:test", channel.json_body["name"]) - self.assertEqual(True, channel.json_body["deactivated"]) + self.assertTrue(channel.json_body["deactivated"]) + self.assertIsNone(channel.json_body["password_hash"]) self.assertEqual(0, len(channel.json_body["threepids"])) self.assertEqual("mxc://servername/mediaid", channel.json_body["avatar_url"]) self.assertEqual("User", channel.json_body["displayname"]) @@ -1478,41 +1489,37 @@ class UserRestTestCase(unittest.HomeserverTestCase): self.assertTrue(profile["display_name"] == "User") # Deactivate user - body = json.dumps({"deactivated": True}) - channel = self.make_request( "PUT", self.url_other_user, access_token=self.admin_user_tok, - content=body.encode(encoding="utf_8"), + 
content={"deactivated": True}, ) self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) self.assertEqual("@user:test", channel.json_body["name"]) - self.assertEqual(True, channel.json_body["deactivated"]) + self.assertTrue(channel.json_body["deactivated"]) # is not in user directory profile = self.get_success(self.store.get_user_in_directory(self.other_user)) - self.assertTrue(profile is None) + self.assertIsNone(profile) # Set new displayname user - body = json.dumps({"displayname": "Foobar"}) - channel = self.make_request( "PUT", self.url_other_user, access_token=self.admin_user_tok, - content=body.encode(encoding="utf_8"), + content={"displayname": "Foobar"}, ) self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) self.assertEqual("@user:test", channel.json_body["name"]) - self.assertEqual(True, channel.json_body["deactivated"]) + self.assertTrue(channel.json_body["deactivated"]) self.assertEqual("Foobar", channel.json_body["displayname"]) # is not in user directory profile = self.get_success(self.store.get_user_in_directory(self.other_user)) - self.assertTrue(profile is None) + self.assertIsNone(profile) def test_reactivate_user(self): """ @@ -1520,24 +1527,14 @@ class UserRestTestCase(unittest.HomeserverTestCase): """ # Deactivate the user. - channel = self.make_request( - "PUT", - self.url_other_user, - access_token=self.admin_user_tok, - content=json.dumps({"deactivated": True}).encode(encoding="utf_8"), - ) - self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) - self._is_erased("@user:test", False) - d = self.store.mark_user_erased("@user:test") - self.assertIsNone(self.get_success(d)) - self._is_erased("@user:test", True) + self._deactivate_user("@user:test") # Attempt to reactivate the user (without a password). channel = self.make_request( "PUT", self.url_other_user, access_token=self.admin_user_tok, - content=json.dumps({"deactivated": False}).encode(encoding="utf_8"), + content={"deactivated": False}, ) self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) @@ -1546,22 +1543,76 @@ class UserRestTestCase(unittest.HomeserverTestCase): "PUT", self.url_other_user, access_token=self.admin_user_tok, - content=json.dumps({"deactivated": False, "password": "foo"}).encode( - encoding="utf_8" - ), + content={"deactivated": False, "password": "foo"}, ) - self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) - - # Get user - channel = self.make_request( - "GET", - self.url_other_user, - access_token=self.admin_user_tok, - ) - self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) self.assertEqual("@user:test", channel.json_body["name"]) - self.assertEqual(False, channel.json_body["deactivated"]) + self.assertFalse(channel.json_body["deactivated"]) + self.assertIsNotNone(channel.json_body["password_hash"]) + self._is_erased("@user:test", False) + + @override_config({"password_config": {"localdb_enabled": False}}) + def test_reactivate_user_localdb_disabled(self): + """ + Test reactivating another user when using SSO. + """ + + # Deactivate the user. 
+ self._deactivate_user("@user:test") + + # Reactivate the user with a password + channel = self.make_request( + "PUT", + self.url_other_user, + access_token=self.admin_user_tok, + content={"deactivated": False, "password": "foo"}, + ) + self.assertEqual(403, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"]) + + # Reactivate the user without a password. + channel = self.make_request( + "PUT", + self.url_other_user, + access_token=self.admin_user_tok, + content={"deactivated": False}, + ) + self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual("@user:test", channel.json_body["name"]) + self.assertFalse(channel.json_body["deactivated"]) + self.assertIsNone(channel.json_body["password_hash"]) + self._is_erased("@user:test", False) + + @override_config({"password_config": {"enabled": False}}) + def test_reactivate_user_password_disabled(self): + """ + Test reactivating another user when using SSO. + """ + + # Deactivate the user. + self._deactivate_user("@user:test") + + # Reactivate the user with a password + channel = self.make_request( + "PUT", + self.url_other_user, + access_token=self.admin_user_tok, + content={"deactivated": False, "password": "foo"}, + ) + self.assertEqual(403, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"]) + + # Reactivate the user without a password. + channel = self.make_request( + "PUT", + self.url_other_user, + access_token=self.admin_user_tok, + content={"deactivated": False}, + ) + self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual("@user:test", channel.json_body["name"]) + self.assertFalse(channel.json_body["deactivated"]) + self.assertIsNone(channel.json_body["password_hash"]) self._is_erased("@user:test", False) def test_set_user_as_admin(self): @@ -1570,18 +1621,16 @@ class UserRestTestCase(unittest.HomeserverTestCase): """ # Set a user as an admin - body = json.dumps({"admin": True}) - channel = self.make_request( "PUT", self.url_other_user, access_token=self.admin_user_tok, - content=body.encode(encoding="utf_8"), + content={"admin": True}, ) self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) self.assertEqual("@user:test", channel.json_body["name"]) - self.assertEqual(True, channel.json_body["admin"]) + self.assertTrue(channel.json_body["admin"]) # Get user channel = self.make_request( @@ -1592,7 +1641,7 @@ class UserRestTestCase(unittest.HomeserverTestCase): self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) self.assertEqual("@user:test", channel.json_body["name"]) - self.assertEqual(True, channel.json_body["admin"]) + self.assertTrue(channel.json_body["admin"]) def test_accidental_deactivation_prevention(self): """ @@ -1602,13 +1651,11 @@ class UserRestTestCase(unittest.HomeserverTestCase): url = "/_synapse/admin/v2/users/@bob:test" # Create user - body = json.dumps({"password": "abc123"}) - channel = self.make_request( "PUT", url, access_token=self.admin_user_tok, - content=body.encode(encoding="utf_8"), + content={"password": "abc123"}, ) self.assertEqual(201, int(channel.result["code"]), msg=channel.result["body"]) @@ -1628,13 +1675,11 @@ class UserRestTestCase(unittest.HomeserverTestCase): self.assertEqual(0, channel.json_body["deactivated"]) # Change password (and use a str for deactivate instead of a bool) - body = json.dumps({"password": "abc123", 
"deactivated": "false"}) # oops! - channel = self.make_request( "PUT", url, access_token=self.admin_user_tok, - content=body.encode(encoding="utf_8"), + content={"password": "abc123", "deactivated": "false"}, ) self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) @@ -1653,7 +1698,7 @@ class UserRestTestCase(unittest.HomeserverTestCase): # Ensure they're still alive self.assertEqual(0, channel.json_body["deactivated"]) - def _is_erased(self, user_id, expect): + def _is_erased(self, user_id: str, expect: bool) -> None: """Assert that the user is erased or not""" d = self.store.is_user_erased(user_id) if expect: @@ -1661,6 +1706,24 @@ class UserRestTestCase(unittest.HomeserverTestCase): else: self.assertFalse(self.get_success(d)) + def _deactivate_user(self, user_id: str) -> None: + """Deactivate user and set as erased""" + + # Deactivate the user. + channel = self.make_request( + "PUT", + "/_synapse/admin/v2/users/%s" % urllib.parse.quote(user_id), + access_token=self.admin_user_tok, + content={"deactivated": True}, + ) + self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) + self.assertTrue(channel.json_body["deactivated"]) + self.assertIsNone(channel.json_body["password_hash"]) + self._is_erased(user_id, False) + d = self.store.mark_user_erased(user_id) + self.assertIsNone(self.get_success(d)) + self._is_erased(user_id, True) + class UserMembershipRestTestCase(unittest.HomeserverTestCase): From 066c703729d72b5da8bb6574d7f7f5f13e12f773 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Thu, 18 Mar 2021 18:37:19 +0100 Subject: [PATCH 13/52] Move support for MSC3026 behind an experimental flag --- synapse/app/generic_worker.py | 7 ++++++- synapse/config/experimental.py | 2 ++ synapse/handlers/presence.py | 12 ++++++++++-- 3 files changed, 18 insertions(+), 3 deletions(-) diff --git a/synapse/app/generic_worker.py b/synapse/app/generic_worker.py index 236d98a29..207d5ccd0 100644 --- a/synapse/app/generic_worker.py +++ b/synapse/app/generic_worker.py @@ -302,6 +302,8 @@ class GenericWorkerPresence(BasePresenceHandler): self.send_stop_syncing, UPDATE_SYNCING_USERS_MS ) + self._busy_presence_enabled = hs.config.experimental.msc3026_enabled + hs.get_reactor().addSystemEventTrigger( "before", "shutdown", @@ -439,8 +441,11 @@ class GenericWorkerPresence(BasePresenceHandler): PresenceState.ONLINE, PresenceState.UNAVAILABLE, PresenceState.OFFLINE, - PresenceState.BUSY, ) + + if self._busy_presence_enabled: + valid_presence += (PresenceState.BUSY,) + if presence not in valid_presence: raise SynapseError(400, "Invalid presence state") diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py index b1c1c51e4..2f0cd0cfd 100644 --- a/synapse/config/experimental.py +++ b/synapse/config/experimental.py @@ -27,3 +27,5 @@ class ExperimentalConfig(Config): # MSC2858 (multiple SSO identity providers) self.msc2858_enabled = experimental.get("msc2858_enabled", False) # type: bool + # MSC3026 (busy presence state) + self.msc3026_enabled = experimental.get("msc3026_enabled", False) # type: bool diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index bcb99f627..372017590 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -104,6 +104,8 @@ class BasePresenceHandler(abc.ABC): self.clock = hs.get_clock() self.store = hs.get_datastore() + self._busy_presence_enabled = hs.config.experimental.msc3026_enabled + active_presence = self.store.take_presence_startup_info() self.user_to_current_state = 
{state.user_id: state for state in active_presence} @@ -730,8 +732,11 @@ class PresenceHandler(BasePresenceHandler): PresenceState.ONLINE, PresenceState.UNAVAILABLE, PresenceState.OFFLINE, - PresenceState.BUSY, ) + + if self._busy_presence_enabled: + valid_presence += (PresenceState.BUSY,) + if presence not in valid_presence: raise SynapseError(400, "Invalid presence state") @@ -745,7 +750,10 @@ class PresenceHandler(BasePresenceHandler): msg = status_msg if presence != PresenceState.OFFLINE else None new_fields["status_msg"] = msg - if presence == PresenceState.ONLINE or presence == PresenceState.BUSY: + if ( + presence == PresenceState.ONLINE or + (self._busy_presence_enabled and presence == PresenceState.BUSY) + ): new_fields["last_active_ts"] = self.clock.time_msec() await self._update_states([prev_state.copy_and_replace(**new_fields)]) From 004234f03abad58b2de28abff406391a6d182e48 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Thu, 18 Mar 2021 18:24:16 +0000 Subject: [PATCH 14/52] Initial spaces summary API (#9643) This is very bare-bones for now: federation will come soon, while pagination is descoped for now but will come later. --- changelog.d/9643.feature | 1 + synapse/api/constants.py | 6 + synapse/config/experimental.py | 3 + synapse/handlers/space_summary.py | 199 ++++++++++++++++++++++++++++++ synapse/rest/client/v1/room.py | 66 +++++++++- synapse/server.py | 5 + 6 files changed, 277 insertions(+), 3 deletions(-) create mode 100644 changelog.d/9643.feature create mode 100644 synapse/handlers/space_summary.py diff --git a/changelog.d/9643.feature b/changelog.d/9643.feature new file mode 100644 index 000000000..2f7ccedcf --- /dev/null +++ b/changelog.d/9643.feature @@ -0,0 +1 @@ +Add initial experimental support for a "space summary" API. diff --git a/synapse/api/constants.py b/synapse/api/constants.py index 691f8f9ad..ed050c810 100644 --- a/synapse/api/constants.py +++ b/synapse/api/constants.py @@ -100,6 +100,9 @@ class EventTypes: Dummy = "org.matrix.dummy_event" + MSC1772_SPACE_CHILD = "org.matrix.msc1772.space.child" + MSC1772_SPACE_PARENT = "org.matrix.msc1772.space.parent" + class EduTypes: Presence = "m.presence" @@ -160,6 +163,9 @@ class EventContentFields: # cf https://github.com/matrix-org/matrix-doc/pull/2228 SELF_DESTRUCT_AFTER = "org.matrix.self_destruct_after" + # cf https://github.com/matrix-org/matrix-doc/pull/1772 + MSC1772_ROOM_TYPE = "org.matrix.msc1772.type" + class RoomEncryptionAlgorithms: MEGOLM_V1_AES_SHA2 = "m.megolm.v1.aes-sha2" diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py index b1c1c51e4..5554bcea8 100644 --- a/synapse/config/experimental.py +++ b/synapse/config/experimental.py @@ -27,3 +27,6 @@ class ExperimentalConfig(Config): # MSC2858 (multiple SSO identity providers) self.msc2858_enabled = experimental.get("msc2858_enabled", False) # type: bool + + # Spaces (MSC1772, MSC2946, etc) + self.spaces_enabled = experimental.get("spaces_enabled", False) # type: bool diff --git a/synapse/handlers/space_summary.py b/synapse/handlers/space_summary.py new file mode 100644 index 000000000..513dc0c71 --- /dev/null +++ b/synapse/handlers/space_summary.py @@ -0,0 +1,199 @@ +# -*- coding: utf-8 -*- +# Copyright 2021 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import itertools
+import logging
+from collections import deque
+from typing import TYPE_CHECKING, Iterable, List, Optional, Set
+
+from synapse.api.constants import EventContentFields, EventTypes, HistoryVisibility
+from synapse.api.errors import AuthError
+from synapse.events import EventBase
+from synapse.events.utils import format_event_for_client_v2
+from synapse.types import JsonDict
+
+if TYPE_CHECKING:
+    from synapse.server import HomeServer
+
+logger = logging.getLogger(__name__)
+
+# number of rooms to return. We'll stop once we hit this limit.
+# TODO: allow clients to reduce this with a request param.
+MAX_ROOMS = 50
+
+# max number of events to return per room.
+MAX_ROOMS_PER_SPACE = 50
+
+
+class SpaceSummaryHandler:
+    def __init__(self, hs: "HomeServer"):
+        self._clock = hs.get_clock()
+        self._auth = hs.get_auth()
+        self._room_list_handler = hs.get_room_list_handler()
+        self._state_handler = hs.get_state_handler()
+        self._store = hs.get_datastore()
+        self._event_serializer = hs.get_event_client_serializer()
+
+    async def get_space_summary(
+        self,
+        requester: str,
+        room_id: str,
+        suggested_only: bool = False,
+        max_rooms_per_space: Optional[int] = None,
+    ) -> JsonDict:
+        """
+        Implementation of the space summary API
+
+        Args:
+            requester: user id of the user making this request
+
+            room_id: room id to start the summary at
+
+            suggested_only: whether we should only return children with the "suggested"
+                flag set.
+
+            max_rooms_per_space: an optional limit on the number of child rooms we will
+                return. This does not apply to the root room (ie, room_id), and
+                is capped by MAX_ROOMS_PER_SPACE.
+
+        Returns:
+            summary dict to return
+        """
+        # first of all, check that the user is in the room in question (or it's
+        # world-readable)
+        await self._auth.check_user_in_room_or_world_readable(room_id, requester)
+
+        # the queue of rooms to process
+        room_queue = deque((room_id,))
+
+        processed_rooms = set()  # type: Set[str]
+
+        rooms_result = []  # type: List[JsonDict]
+        events_result = []  # type: List[JsonDict]
+
+        now = self._clock.time_msec()
+
+        while room_queue and len(rooms_result) < MAX_ROOMS:
+            room_id = room_queue.popleft()
+            logger.debug("Processing room %s", room_id)
+            processed_rooms.add(room_id)
+
+            try:
+                await self._auth.check_user_in_room_or_world_readable(
+                    room_id, requester
+                )
+            except AuthError:
+                logger.info(
+                    "user %s cannot view room %s, omitting from summary",
+                    requester,
+                    room_id,
+                )
+                continue
+
+            room_entry = await self._build_room_entry(room_id)
+            rooms_result.append(room_entry)
+
+            # look for child rooms/spaces.
+            child_events = await self._get_child_events(room_id)
+
+            if suggested_only:
+                # we only care about suggested children
+                child_events = filter(_is_suggested_child_event, child_events)
+
+            # The client-specified max_rooms_per_space limit doesn't apply to the
+            # room_id specified in the request, so we ignore it if this is the
+            # first room we are processing. Otherwise, apply any client-specified
+            # limit, capping to our built-in limit.
+            if max_rooms_per_space is not None and len(processed_rooms) > 1:
+                max_rooms = min(MAX_ROOMS_PER_SPACE, max_rooms_per_space)
+            else:
+                max_rooms = MAX_ROOMS_PER_SPACE
+
+            for edge_event in itertools.islice(child_events, max_rooms):
+                edge_room_id = edge_event.state_key
+
+                events_result.append(
+                    await self._event_serializer.serialize_event(
+                        edge_event,
+                        time_now=now,
+                        event_format=format_event_for_client_v2,
+                    )
+                )
+
+                # if we haven't yet visited the target of this link, add it to the queue
+                if edge_room_id not in processed_rooms:
+                    room_queue.append(edge_room_id)
+
+        return {"rooms": rooms_result, "events": events_result}
+
+    async def _build_room_entry(self, room_id: str) -> JsonDict:
+        """Generate an entry suitable for the 'rooms' list in the summary response"""
+        stats = await self._store.get_room_with_stats(room_id)
+
+        # currently this should be impossible because we call
+        # check_user_in_room_or_world_readable on the room before we get here, so
+        # there should always be an entry
+        assert stats is not None, "unable to retrieve stats for %s" % (room_id,)
+
+        current_state_ids = await self._store.get_current_state_ids(room_id)
+        create_event = await self._store.get_event(
+            current_state_ids[(EventTypes.Create, "")]
+        )
+
+        # TODO: update once MSC1772 lands
+        room_type = create_event.content.get(EventContentFields.MSC1772_ROOM_TYPE)
+
+        entry = {
+            "room_id": stats["room_id"],
+            "name": stats["name"],
+            "topic": stats["topic"],
+            "canonical_alias": stats["canonical_alias"],
+            "num_joined_members": stats["joined_members"],
+            "avatar_url": stats["avatar"],
+            "world_readable": (
+                stats["history_visibility"] == HistoryVisibility.WORLD_READABLE
+            ),
+            "guest_can_join": stats["guest_access"] == "can_join",
+            "room_type": room_type,
+        }
+
+        # Filter out Nones – rather omit the field altogether
+        room_entry = {k: v for k, v in entry.items() if v is not None}
+
+        return room_entry
+
+    async def _get_child_events(self, room_id: str) -> Iterable[EventBase]:
+        # look for child rooms/spaces.
+        current_state_ids = await self._store.get_current_state_ids(room_id)
+
+        events = await self._store.get_events_as_list(
+            [
+                event_id
+                for key, event_id in current_state_ids.items()
+                # TODO: update once MSC1772 lands
+                if key[0] == EventTypes.MSC1772_SPACE_CHILD
+            ]
+        )
+
+        # filter out any events without a "via" (which implies it has been redacted)
+        return (e for e in events if e.content.get("via"))
+
+
+def _is_suggested_child_event(edge_event: EventBase) -> bool:
+    suggested = edge_event.content.get("suggested")
+    if isinstance(suggested, bool) and suggested:
+        return True
+    logger.debug("Ignoring not-suggested child %s", edge_event.state_key)
+    return False
diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py
index e7a8207eb..f78d79226 100644
--- a/synapse/rest/client/v1/room.py
+++ b/synapse/rest/client/v1/room.py
@@ -18,9 +18,11 @@

 import logging
 import re
-from typing import TYPE_CHECKING, List, Optional
+from typing import TYPE_CHECKING, List, Optional, Tuple
 from urllib import parse as urlparse

+from twisted.web.server import Request
+
 from synapse.api.constants import EventTypes, Membership
 from synapse.api.errors import (
     AuthError,
@@ -35,6 +37,7 @@ from synapse.events.utils import format_event_for_client_v2
 from synapse.http.servlet import (
     RestServlet,
     assert_params_in_dict,
+    parse_boolean,
     parse_integer,
     parse_json_object_from_request,
     parse_string,
@@ -44,7 +47,14 @@ from synapse.rest.client.transactions import HttpTransactionCache
 from synapse.rest.client.v2_alpha._base import client_patterns
 from synapse.storage.state import StateFilter
 from synapse.streams.config import PaginationConfig
-from synapse.types import RoomAlias, RoomID, StreamToken, ThirdPartyInstanceID, UserID
+from synapse.types import (
+    JsonDict,
+    RoomAlias,
+    RoomID,
+    StreamToken,
+    ThirdPartyInstanceID,
+    UserID,
+)
 from synapse.util import json_decoder
 from synapse.util.stringutils import parse_and_validate_server_name, random_string

@@ -987,7 +997,54 @@ def register_txn_path(servlet, regex_string, http_server, with_get=False):
     )


-def register_servlets(hs, http_server, is_worker=False):
+class RoomSpaceSummaryRestServlet(RestServlet):
+    PATTERNS = (
+        re.compile(
+            "^/_matrix/client/unstable/org.matrix.msc2946"
+            "/rooms/(?P<room_id>[^/]*)/spaces$"
+        ),
+    )
+
+    def __init__(self, hs: "synapse.server.HomeServer"):
+        super().__init__()
+        self._auth = hs.get_auth()
+        self._space_summary_handler = hs.get_space_summary_handler()
+
+    async def on_GET(self, request: Request, room_id: str) -> Tuple[int, JsonDict]:
+        requester = await self._auth.get_user_by_req(request, allow_guest=True)
+
+        return 200, await self._space_summary_handler.get_space_summary(
+            requester.user.to_string(),
+            room_id,
+            suggested_only=parse_boolean(request, "suggested_only", default=False),
+            max_rooms_per_space=parse_integer(request, "max_rooms_per_space"),
+        )
+
+    async def on_POST(self, request: Request, room_id: str) -> Tuple[int, JsonDict]:
+        requester = await self._auth.get_user_by_req(request, allow_guest=True)
+        content = parse_json_object_from_request(request)
+
+        suggested_only = content.get("suggested_only", False)
+        if not isinstance(suggested_only, bool):
+            raise SynapseError(
+                400, "'suggested_only' must be a boolean", Codes.BAD_JSON
+            )
+
+        max_rooms_per_space = content.get("max_rooms_per_space")
+        if max_rooms_per_space is not None and not isinstance(max_rooms_per_space, int):
+            raise SynapseError(
+                400, "'max_rooms_per_space' must be an integer", Codes.BAD_JSON
+            )
+
+        return 200, await
self._space_summary_handler.get_space_summary( + requester.user.to_string(), + room_id, + suggested_only=suggested_only, + max_rooms_per_space=max_rooms_per_space, + ) + + +def register_servlets(hs: "synapse.server.HomeServer", http_server, is_worker=False): RoomStateEventRestServlet(hs).register(http_server) RoomMemberListRestServlet(hs).register(http_server) JoinedRoomMemberListRestServlet(hs).register(http_server) @@ -1001,6 +1058,9 @@ def register_servlets(hs, http_server, is_worker=False): RoomTypingRestServlet(hs).register(http_server) RoomEventContextServlet(hs).register(http_server) + if hs.config.experimental.spaces_enabled: + RoomSpaceSummaryRestServlet(hs).register(http_server) + # Some servlets only get registered for the main process. if not is_worker: RoomCreateRestServlet(hs).register(http_server) diff --git a/synapse/server.py b/synapse/server.py index d11d08c57..98822d8e2 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -100,6 +100,7 @@ from synapse.handlers.room_member import RoomMemberHandler, RoomMemberMasterHand from synapse.handlers.room_member_worker import RoomMemberWorkerHandler from synapse.handlers.search import SearchHandler from synapse.handlers.set_password import SetPasswordHandler +from synapse.handlers.space_summary import SpaceSummaryHandler from synapse.handlers.sso import SsoHandler from synapse.handlers.stats import StatsHandler from synapse.handlers.sync import SyncHandler @@ -732,6 +733,10 @@ class HomeServer(metaclass=abc.ABCMeta): def get_account_data_handler(self) -> AccountDataHandler: return AccountDataHandler(self) + @cache_in_self + def get_space_summary_handler(self) -> SpaceSummaryHandler: + return SpaceSummaryHandler(self) + @cache_in_self def get_external_cache(self) -> ExternalCache: return ExternalCache(self) From 9b0e3009fa4722472fa7c576bcf44228fa8f6a1f Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Thu, 18 Mar 2021 14:39:05 -0400 Subject: [PATCH 15/52] Fix type-hints from bad merge. 
--- synapse/rest/client/v1/room.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py index f78d79226..b7aa82a65 100644 --- a/synapse/rest/client/v1/room.py +++ b/synapse/rest/client/v1/room.py @@ -1005,7 +1005,7 @@ class RoomSpaceSummaryRestServlet(RestServlet): ), ) - def __init__(self, hs: "synapse.server.HomeServer"): + def __init__(self, hs: "HomeServer"): super().__init__() self._auth = hs.get_auth() self._space_summary_handler = hs.get_space_summary_handler() @@ -1044,7 +1044,7 @@ class RoomSpaceSummaryRestServlet(RestServlet): ) -def register_servlets(hs: "synapse.server.HomeServer", http_server, is_worker=False): +def register_servlets(hs: "HomeServer", http_server, is_worker=False): RoomStateEventRestServlet(hs).register(http_server) RoomMemberListRestServlet(hs).register(http_server) JoinedRoomMemberListRestServlet(hs).register(http_server) From 201178db1aea302d714d6da4dc188e9d103ee474 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Thu, 18 Mar 2021 20:31:47 +0000 Subject: [PATCH 16/52] federation_client: stop adding URL prefix (#9645) --- changelog.d/9645.misc | 1 + scripts-dev/federation_client.py | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 changelog.d/9645.misc diff --git a/changelog.d/9645.misc b/changelog.d/9645.misc new file mode 100644 index 000000000..9a7ce364c --- /dev/null +++ b/changelog.d/9645.misc @@ -0,0 +1 @@ +In the `federation_client` commandline client, stop automatically adding the URL prefix, so that servlets on other prefixes can be tested. diff --git a/scripts-dev/federation_client.py b/scripts-dev/federation_client.py index abcec48c4..657919a0d 100755 --- a/scripts-dev/federation_client.py +++ b/scripts-dev/federation_client.py @@ -223,7 +223,7 @@ def main(): parser.add_argument("--body", help="Data to send as the body of the HTTP request") parser.add_argument( - "path", help="request path. We will add '/_matrix/federation/v1/' to this." + "path", help="request path, including the '/_matrix/federation/...' prefix." ) args = parser.parse_args() @@ -239,7 +239,7 @@ def main(): args.server_name, key, args.destination, - "/_matrix/federation/v1/" + args.path, + args.path, content=args.body, ) From 0e355847347d6a2334dd8879174b1b6806b2a38b Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Thu, 18 Mar 2021 21:12:07 +0000 Subject: [PATCH 17/52] federation_client: handle inline signing_keys in hs.yaml (#9647) --- changelog.d/9647.misc | 1 + scripts-dev/federation_client.py | 71 ++++++++------------------------ 2 files changed, 18 insertions(+), 54 deletions(-) create mode 100644 changelog.d/9647.misc diff --git a/changelog.d/9647.misc b/changelog.d/9647.misc new file mode 100644 index 000000000..303a8c660 --- /dev/null +++ b/changelog.d/9647.misc @@ -0,0 +1 @@ +In the `federation_client` commandline client, handle inline `signing_key`s in `homeserver.yaml`. 
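For context, the two `homeserver.yaml` forms that the script now handles look
roughly like this (the key material is an illustrative placeholder; the inline
form is "<algorithm> <version> <base64 key>", matching what the script splits
below):

    # Form 1: key stored in a separate file
    signing_key_path: "/data/my.server.signing.key"

    # Form 2: key inlined in the config
    signing_key: "ed25519 a_AbCd bWluaW1hbC1wbGFjZWhvbGRlci1rZXk"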
diff --git a/scripts-dev/federation_client.py b/scripts-dev/federation_client.py index 657919a0d..6f76c08fc 100755 --- a/scripts-dev/federation_client.py +++ b/scripts-dev/federation_client.py @@ -22,8 +22,8 @@ import sys from typing import Any, Optional from urllib import parse as urlparse -import nacl.signing import requests +import signedjson.key import signedjson.types import srvlookup import yaml @@ -44,18 +44,6 @@ def encode_base64(input_bytes): return output_string -def decode_base64(input_string): - """Decode a base64 string to bytes inferring padding from the length of the - string.""" - - input_bytes = input_string.encode("ascii") - input_len = len(input_bytes) - padding = b"=" * (3 - ((input_len + 3) % 4)) - output_len = 3 * ((input_len + 2) // 4) + (input_len + 2) % 4 - 2 - output_bytes = base64.b64decode(input_bytes + padding) - return output_bytes[:output_len] - - def encode_canonical_json(value): return json.dumps( value, @@ -88,42 +76,6 @@ def sign_json( return json_object -NACL_ED25519 = "ed25519" - - -def decode_signing_key_base64(algorithm, version, key_base64): - """Decode a base64 encoded signing key - Args: - algorithm (str): The algorithm the key is for (currently "ed25519"). - version (str): Identifies this key out of the keys for this entity. - key_base64 (str): Base64 encoded bytes of the key. - Returns: - A SigningKey object. - """ - if algorithm == NACL_ED25519: - key_bytes = decode_base64(key_base64) - key = nacl.signing.SigningKey(key_bytes) - key.version = version - key.alg = NACL_ED25519 - return key - else: - raise ValueError("Unsupported algorithm %s" % (algorithm,)) - - -def read_signing_keys(stream): - """Reads a list of keys from a stream - Args: - stream : A stream to iterate for keys. - Returns: - list of SigningKey objects. 
- """ - keys = [] - for line in stream: - algorithm, version, key_base64 = line.split() - keys.append(decode_signing_key_base64(algorithm, version, key_base64)) - return keys - - def request( method: Optional[str], origin_name: str, @@ -228,11 +180,16 @@ def main(): args = parser.parse_args() - if not args.server_name or not args.signing_key_path: + args.signing_key = None + if args.signing_key_path: + with open(args.signing_key_path) as f: + args.signing_key = f.readline() + + if not args.server_name or not args.signing_key: read_args_from_config(args) - with open(args.signing_key_path) as f: - key = read_signing_keys(f)[0] + algorithm, version, key_base64 = args.signing_key.split() + key = signedjson.key.decode_signing_key_base64(algorithm, version, key_base64) result = request( args.method, @@ -255,10 +212,16 @@ def main(): def read_args_from_config(args): with open(args.config, "r") as fh: config = yaml.safe_load(fh) + if not args.server_name: args.server_name = config["server_name"] - if not args.signing_key_path: - args.signing_key_path = config["signing_key_path"] + + if not args.signing_key: + if "signing_key" in config: + args.signing_key = config["signing_key"] + else: + with open(config["signing_key_path"]) as f: + args.signing_key = f.readline() class MatrixConnectionAdapter(HTTPAdapter): From 066068f03478753b7d838ae49e87d7a6cde80fd6 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 19 Mar 2021 12:20:11 +0000 Subject: [PATCH 18/52] fix mypy --- synapse/rest/client/v1/room.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py index b7aa82a65..6c722d634 100644 --- a/synapse/rest/client/v1/room.py +++ b/synapse/rest/client/v1/room.py @@ -21,8 +21,6 @@ import re from typing import TYPE_CHECKING, List, Optional, Tuple from urllib import parse as urlparse -from twisted.web.server import Request - from synapse.api.constants import EventTypes, Membership from synapse.api.errors import ( AuthError, @@ -42,6 +40,7 @@ from synapse.http.servlet import ( parse_json_object_from_request, parse_string, ) +from synapse.http.site import SynapseRequest from synapse.logging.opentracing import set_tag from synapse.rest.client.transactions import HttpTransactionCache from synapse.rest.client.v2_alpha._base import client_patterns @@ -1010,7 +1009,9 @@ class RoomSpaceSummaryRestServlet(RestServlet): self._auth = hs.get_auth() self._space_summary_handler = hs.get_space_summary_handler() - async def on_GET(self, request: Request, room_id: str) -> Tuple[int, JsonDict]: + async def on_GET( + self, request: SynapseRequest, room_id: str + ) -> Tuple[int, JsonDict]: requester = await self._auth.get_user_by_req(request, allow_guest=True) return 200, await self._space_summary_handler.get_space_summary( @@ -1020,7 +1021,9 @@ class RoomSpaceSummaryRestServlet(RestServlet): max_rooms_per_space=parse_integer(request, "max_rooms_per_space"), ) - async def on_POST(self, request: Request, room_id: str) -> Tuple[int, JsonDict]: + async def on_POST( + self, request: SynapseRequest, room_id: str + ) -> Tuple[int, JsonDict]: requester = await self._auth.get_user_by_req(request, allow_guest=True) content = parse_json_object_from_request(request) From 0b56481caafc56c2e624d4b6506c91fc3913615e Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Fri, 19 Mar 2021 16:11:08 +0100 Subject: [PATCH 19/52] Fix lint --- synapse/app/generic_worker.py | 8 ++++---- synapse/handlers/presence.py | 13 ++++++------- 2 files changed, 10 
insertions(+), 11 deletions(-) diff --git a/synapse/app/generic_worker.py b/synapse/app/generic_worker.py index 207d5ccd0..caef394e1 100644 --- a/synapse/app/generic_worker.py +++ b/synapse/app/generic_worker.py @@ -441,12 +441,12 @@ class GenericWorkerPresence(BasePresenceHandler): PresenceState.ONLINE, PresenceState.UNAVAILABLE, PresenceState.OFFLINE, + PresenceState.BUSY, ) - if self._busy_presence_enabled: - valid_presence += (PresenceState.BUSY,) - - if presence not in valid_presence: + if presence not in valid_presence or ( + presence == PresenceState.BUSY and not self._busy_presence_enabled + ): raise SynapseError(400, "Invalid presence state") user_id = target_user.to_string() diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index 372017590..492c4478f 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -732,12 +732,12 @@ class PresenceHandler(BasePresenceHandler): PresenceState.ONLINE, PresenceState.UNAVAILABLE, PresenceState.OFFLINE, + PresenceState.BUSY, ) - if self._busy_presence_enabled: - valid_presence += (PresenceState.BUSY,) - - if presence not in valid_presence: + if presence not in valid_presence or ( + presence == PresenceState.BUSY and not self._busy_presence_enabled + ): raise SynapseError(400, "Invalid presence state") user_id = target_user.to_string() @@ -750,9 +750,8 @@ class PresenceHandler(BasePresenceHandler): msg = status_msg if presence != PresenceState.OFFLINE else None new_fields["status_msg"] = msg - if ( - presence == PresenceState.ONLINE or - (self._busy_presence_enabled and presence == PresenceState.BUSY) + if presence == PresenceState.ONLINE or ( + self._busy_presence_enabled and presence == PresenceState.BUSY ): new_fields["last_active_ts"] = self.clock.time_msec() From b6ed4f55acf7af44af1d33097407d2dd7f08b5a5 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Fri, 19 Mar 2021 18:19:50 +0100 Subject: [PATCH 20/52] Incorporate review --- synapse/handlers/presence.py | 2 +- synapse/rest/client/versions.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index 492c4478f..da92feacc 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -751,7 +751,7 @@ class PresenceHandler(BasePresenceHandler): new_fields["status_msg"] = msg if presence == PresenceState.ONLINE or ( - self._busy_presence_enabled and presence == PresenceState.BUSY + presence == PresenceState.BUSY and self._busy_presence_enabled ): new_fields["last_active_ts"] = self.clock.time_msec() diff --git a/synapse/rest/client/versions.py b/synapse/rest/client/versions.py index f387d29b5..3e3d8839f 100644 --- a/synapse/rest/client/versions.py +++ b/synapse/rest/client/versions.py @@ -82,7 +82,7 @@ class VersionsRestServlet(RestServlet): "io.element.e2ee_forced.private": self.e2ee_forced_private, "io.element.e2ee_forced.trusted_private": self.e2ee_forced_trusted_private, # Supports the busy presence state described in MSC3026. - "org.matrix.msc3026.busy_presence": True, + "org.matrix.msc3026.busy_presence": self.config.experimental.msc3026_enabled, }, }, ) From d66f9070cd0f826e5b6630f8e1f6ed5837a3c3cb Mon Sep 17 00:00:00 2001 From: Ankit Dobhal Date: Mon, 22 Mar 2021 20:48:13 +0530 Subject: [PATCH 21/52] Fixed code misc. quality issues (#9649) - Merge 'isinstance' calls. - Remove unnecessary dict call outside of comprehension. - Use 'sys.exit()' calls. 
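The `sys.exit()` change is more than cosmetic: the bare `exit()` builtin is a
convenience binding injected by the `site` module for interactive use and is
not guaranteed to exist (for example under `python -S` or in frozen builds),
whereas `sys.exit()` is always available. A minimal sketch of the pattern the
script now follows (illustrative, not part of the diff):

    import sys

    def main() -> None:
        parts = "badly|formed|line".split("|")
        if len(parts) != 2:
            print("Unable to parse input line", file=sys.stderr)
            # sys.exit raises SystemExit; reliable in non-interactive scripts
            sys.exit(1)

    if __name__ == "__main__":
        main()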
--- changelog.d/9649.misc | 1 + scripts/move_remote_media_to_new_store.py | 2 +- synapse/push/httppusher.py | 2 +- synapse/util/frozenutils.py | 2 +- 4 files changed, 4 insertions(+), 3 deletions(-) create mode 100644 changelog.d/9649.misc diff --git a/changelog.d/9649.misc b/changelog.d/9649.misc new file mode 100644 index 000000000..58c5fd053 --- /dev/null +++ b/changelog.d/9649.misc @@ -0,0 +1 @@ +Fixed some antipattern issues to improve code quality. diff --git a/scripts/move_remote_media_to_new_store.py b/scripts/move_remote_media_to_new_store.py index ab2e76338..8477955a9 100755 --- a/scripts/move_remote_media_to_new_store.py +++ b/scripts/move_remote_media_to_new_store.py @@ -51,7 +51,7 @@ def main(src_repo, dest_repo): parts = line.split("|") if len(parts) != 2: print("Unable to parse input line %s" % line, file=sys.stderr) - exit(1) + sys.exit(1) move_media(parts[0], parts[1], src_paths, dest_paths) diff --git a/synapse/push/httppusher.py b/synapse/push/httppusher.py index eb6de8ba7..026134ae2 100644 --- a/synapse/push/httppusher.py +++ b/synapse/push/httppusher.py @@ -290,7 +290,7 @@ class HttpPusher(Pusher): if rejected is False: return False - if isinstance(rejected, list) or isinstance(rejected, tuple): + if isinstance(rejected, (list, tuple)): for pk in rejected: if pk != self.pushkey: # for sanity, we only remove the pushkey if it diff --git a/synapse/util/frozenutils.py b/synapse/util/frozenutils.py index 5f7a6dd1d..5ca2e71e6 100644 --- a/synapse/util/frozenutils.py +++ b/synapse/util/frozenutils.py @@ -36,7 +36,7 @@ def freeze(o): def unfreeze(o): if isinstance(o, (dict, frozendict)): - return dict({k: unfreeze(v) for k, v in o.items()}) + return {k: unfreeze(v) for k, v in o.items()} if isinstance(o, (bytes, str)): return o From 4612302399dc95ba781af6ebc9eedc768f7f0a7d Mon Sep 17 00:00:00 2001 From: Johannes Wienke Date: Mon, 22 Mar 2021 16:31:00 +0100 Subject: [PATCH 22/52] Include opencontainers labels in Docker image (#9612) Cf. https://github.com/opencontainers/image-spec/blob/master/annotations.md#pre-defined-annotation-keys Signed-off-by: Johannes Wienke --- changelog.d/9612.docker | 1 + docker/Dockerfile | 5 +++++ 2 files changed, 6 insertions(+) create mode 100644 changelog.d/9612.docker diff --git a/changelog.d/9612.docker b/changelog.d/9612.docker new file mode 100644 index 000000000..d95c503c8 --- /dev/null +++ b/changelog.d/9612.docker @@ -0,0 +1 @@ +Include [opencontainers labels](https://github.com/opencontainers/image-spec/blob/master/annotations.md#pre-defined-annotation-keys) in the Docker image. 
diff --git a/docker/Dockerfile b/docker/Dockerfile index def450154..7cd4dd7d1 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -18,6 +18,11 @@ ARG PYTHON_VERSION=3.8 ### FROM docker.io/python:${PYTHON_VERSION}-slim as builder +LABEL org.opencontainers.image.url='https://matrix.org/docs/projects/server/synapse' +LABEL org.opencontainers.image.documentation='https://github.com/matrix-org/synapse/blob/master/docker/README.md' +LABEL org.opencontainers.image.source='https://github.com/matrix-org/synapse.git' +LABEL org.opencontainers.image.licenses='Apache-2.0' + # install the OS build deps RUN apt-get update && apt-get install -y \ build-essential \ From 5b268997bdde04c905eed0a7c04e9e2352e35fba Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Mon, 22 Mar 2021 17:20:47 +0000 Subject: [PATCH 23/52] Allow providing credentials to HTTPS_PROXY (#9657) Addresses https://github.com/matrix-org/synapse-dinsic/issues/70 This PR causes `ProxyAgent` to attempt to extract credentials from an `HTTPS_PROXY` env var. If credentials are found, a `Proxy-Authorization` header ([details](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Proxy-Authorization)) is sent to the proxy server to authenticate against it. The headers are *not* passed to the remote server. Also added some type hints. --- changelog.d/9657.feature | 1 + synapse/http/connectproxyclient.py | 96 ++++++++++++++++++++++-------- synapse/http/proxyagent.py | 81 ++++++++++++++++++++++--- tests/http/test_proxyagent.py | 40 +++++++++++++ 4 files changed, 184 insertions(+), 34 deletions(-) create mode 100644 changelog.d/9657.feature diff --git a/changelog.d/9657.feature b/changelog.d/9657.feature new file mode 100644 index 000000000..c56a615a8 --- /dev/null +++ b/changelog.d/9657.feature @@ -0,0 +1 @@ +Add support for credentials for proxy authentication in the `HTTPS_PROXY` environment variable. 
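Before the implementation diff, a small usage sketch of the helpers added below (hypothetical proxy host and credentials; as the docstrings below note, a protocol scheme such as http:// is not yet supported in the connection string):

    import base64

    from synapse.http.proxyagent import parse_username_password

    # Credentials are split off on the *last* "@", so passwords may themselves contain "@"
    creds, hostport = parse_username_password(b"bob:pinkponies@proxy.com:8888")
    assert hostport == b"proxy.com:8888"
    assert creds.username_password == b"bob:pinkponies"

    # This value is sent with the CONNECT request; note that base64.encodebytes,
    # unlike base64.b64encode, appends a trailing newline
    assert creds.as_proxy_authorization_value() == b"Basic " + base64.encodebytes(b"bob:pinkponies")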
diff --git a/synapse/http/connectproxyclient.py b/synapse/http/connectproxyclient.py index 856e28454..b797e3ce8 100644 --- a/synapse/http/connectproxyclient.py +++ b/synapse/http/connectproxyclient.py @@ -19,9 +19,10 @@ from zope.interface import implementer from twisted.internet import defer, protocol from twisted.internet.error import ConnectError -from twisted.internet.interfaces import IStreamClientEndpoint -from twisted.internet.protocol import connectionDone +from twisted.internet.interfaces import IReactorCore, IStreamClientEndpoint +from twisted.internet.protocol import ClientFactory, Protocol, connectionDone from twisted.web import http +from twisted.web.http_headers import Headers logger = logging.getLogger(__name__) @@ -43,23 +44,33 @@ class HTTPConnectProxyEndpoint: Args: reactor: the Twisted reactor to use for the connection - proxy_endpoint (IStreamClientEndpoint): the endpoint to use to connect to the - proxy - host (bytes): hostname that we want to CONNECT to - port (int): port that we want to connect to + proxy_endpoint: the endpoint to use to connect to the proxy + host: hostname that we want to CONNECT to + port: port that we want to connect to + headers: Extra HTTP headers to include in the CONNECT request """ - def __init__(self, reactor, proxy_endpoint, host, port): + def __init__( + self, + reactor: IReactorCore, + proxy_endpoint: IStreamClientEndpoint, + host: bytes, + port: int, + headers: Headers, + ): self._reactor = reactor self._proxy_endpoint = proxy_endpoint self._host = host self._port = port + self._headers = headers def __repr__(self): return "<HTTPConnectProxyEndpoint %s>" % (self._proxy_endpoint,) - def connect(self, protocolFactory): - f = HTTPProxiedClientFactory(self._host, self._port, protocolFactory) + def connect(self, protocolFactory: ClientFactory): + f = HTTPProxiedClientFactory( + self._host, self._port, protocolFactory, self._headers + ) d = self._proxy_endpoint.connect(f) # once the tcp socket connects successfully, we need to wait for the # CONNECT to complete. @@ -74,15 +85,23 @@ class HTTPProxiedClientFactory(protocol.ClientFactory): HTTP Protocol object and run the rest of the connection.
Args: - dst_host (bytes): hostname that we want to CONNECT to - dst_port (int): port that we want to connect to - wrapped_factory (protocol.ClientFactory): The original Factory + dst_host: hostname that we want to CONNECT to + dst_port: port that we want to connect to + wrapped_factory: The original Factory + headers: Extra HTTP headers to include in the CONNECT request """ - def __init__(self, dst_host, dst_port, wrapped_factory): + def __init__( + self, + dst_host: bytes, + dst_port: int, + wrapped_factory: ClientFactory, + headers: Headers, + ): self.dst_host = dst_host self.dst_port = dst_port self.wrapped_factory = wrapped_factory + self.headers = headers self.on_connection = defer.Deferred() def startedConnecting(self, connector): @@ -92,7 +111,11 @@ class HTTPProxiedClientFactory(protocol.ClientFactory): wrapped_protocol = self.wrapped_factory.buildProtocol(addr) return HTTPConnectProtocol( - self.dst_host, self.dst_port, wrapped_protocol, self.on_connection + self.dst_host, + self.dst_port, + wrapped_protocol, + self.on_connection, + self.headers, ) def clientConnectionFailed(self, connector, reason): @@ -112,24 +135,37 @@ class HTTPConnectProtocol(protocol.Protocol): """Protocol that wraps an existing Protocol to do a CONNECT handshake at connect Args: - host (bytes): The original HTTP(s) hostname or IPv4 or IPv6 address literal + host: The original HTTP(s) hostname or IPv4 or IPv6 address literal to put in the CONNECT request - port (int): The original HTTP(s) port to put in the CONNECT request + port: The original HTTP(s) port to put in the CONNECT request - wrapped_protocol (interfaces.IProtocol): the original protocol (probably - HTTPChannel or TLSMemoryBIOProtocol, but could be anything really) + wrapped_protocol: the original protocol (probably HTTPChannel or + TLSMemoryBIOProtocol, but could be anything really) - connected_deferred (Deferred): a Deferred which will be callbacked with + connected_deferred: a Deferred which will be callbacked with wrapped_protocol when the CONNECT completes + + headers: Extra HTTP headers to include in the CONNECT request """ - def __init__(self, host, port, wrapped_protocol, connected_deferred): + def __init__( + self, + host: bytes, + port: int, + wrapped_protocol: Protocol, + connected_deferred: defer.Deferred, + headers: Headers, + ): self.host = host self.port = port self.wrapped_protocol = wrapped_protocol self.connected_deferred = connected_deferred - self.http_setup_client = HTTPConnectSetupClient(self.host, self.port) + self.headers = headers + + self.http_setup_client = HTTPConnectSetupClient( + self.host, self.port, self.headers + ) self.http_setup_client.on_connected.addCallback(self.proxyConnected) def connectionMade(self): @@ -154,7 +190,7 @@ class HTTPConnectProtocol(protocol.Protocol): if buf: self.wrapped_protocol.dataReceived(buf) - def dataReceived(self, data): + def dataReceived(self, data: bytes): # if we've set up the HTTP protocol, we can send the data there if self.wrapped_protocol.connected: return self.wrapped_protocol.dataReceived(data) @@ -168,21 +204,29 @@ class HTTPConnectSetupClient(http.HTTPClient): """HTTPClient protocol to send a CONNECT message for proxies and read the response. 
Args: - host (bytes): The hostname to send in the CONNECT message - port (int): The port to send in the CONNECT message + host: The hostname to send in the CONNECT message + port: The port to send in the CONNECT message + headers: Extra headers to send with the CONNECT message """ - def __init__(self, host, port): + def __init__(self, host: bytes, port: int, headers: Headers): self.host = host self.port = port + self.headers = headers self.on_connected = defer.Deferred() def connectionMade(self): logger.debug("Connected to proxy, sending CONNECT") self.sendCommand(b"CONNECT", b"%s:%d" % (self.host, self.port)) + + # Send any additional specified headers + for name, values in self.headers.getAllRawHeaders(): + for value in values: + self.sendHeader(name, value) + self.endHeaders() - def handleStatus(self, version, status, message): + def handleStatus(self, version: bytes, status: bytes, message: bytes): logger.debug("Got Status: %s %s %s", status, message, version) if status != b"200": raise ProxyConnectError("Unexpected status on CONNECT: %s" % status) diff --git a/synapse/http/proxyagent.py b/synapse/http/proxyagent.py index 3d553ae23..16ec85006 100644 --- a/synapse/http/proxyagent.py +++ b/synapse/http/proxyagent.py @@ -12,10 +12,13 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import base64 import logging import re +from typing import Optional, Tuple from urllib.request import getproxies_environment, proxy_bypass_environment +import attr from zope.interface import implementer from twisted.internet import defer @@ -23,6 +26,7 @@ from twisted.internet.endpoints import HostnameEndpoint, wrapClientTLS from twisted.python.failure import Failure from twisted.web.client import URI, BrowserLikePolicyForHTTPS, _AgentBase from twisted.web.error import SchemeNotSupported +from twisted.web.http_headers import Headers from twisted.web.iweb import IAgent from synapse.http.connectproxyclient import HTTPConnectProxyEndpoint @@ -32,6 +36,22 @@ logger = logging.getLogger(__name__) _VALID_URI = re.compile(br"\A[\x21-\x7e]+\Z") +@attr.s +class ProxyCredentials: + username_password = attr.ib(type=bytes) + + def as_proxy_authorization_value(self) -> bytes: + """ + Return the value for a Proxy-Authorization header (i.e. 'Basic abdef=='). + + Returns: + A transformation of the authentication string: the encoded value for + a Proxy-Authorization header.
+ """ + # Encode as base64 and prepend the authorization type + return b"Basic " + base64.encodebytes(self.username_password) + + @implementer(IAgent) class ProxyAgent(_AgentBase): """An Agent implementation which will use an HTTP proxy if one was requested @@ -96,6 +116,9 @@ class ProxyAgent(_AgentBase): https_proxy = proxies["https"].encode() if "https" in proxies else None no_proxy = proxies["no"] if "no" in proxies else None + # Parse credentials from https proxy connection string if present + self.https_proxy_creds, https_proxy = parse_username_password(https_proxy) + self.http_proxy_endpoint = _http_proxy_endpoint( http_proxy, self.proxy_reactor, **self._endpoint_kwargs ) @@ -175,11 +198,22 @@ class ProxyAgent(_AgentBase): and self.https_proxy_endpoint and not should_skip_proxy ): + connect_headers = Headers() + + # Determine whether we need to set Proxy-Authorization headers + if self.https_proxy_creds: + # Set a Proxy-Authorization header + connect_headers.addRawHeader( + b"Proxy-Authorization", + self.https_proxy_creds.as_proxy_authorization_value(), + ) + endpoint = HTTPConnectProxyEndpoint( self.proxy_reactor, self.https_proxy_endpoint, parsed_uri.host, parsed_uri.port, + headers=connect_headers, ) else: # not using a proxy @@ -208,12 +242,16 @@ class ProxyAgent(_AgentBase): ) -def _http_proxy_endpoint(proxy, reactor, **kwargs): +def _http_proxy_endpoint(proxy: Optional[bytes], reactor, **kwargs): """Parses an http proxy setting and returns an endpoint for the proxy Args: - proxy (bytes|None): the proxy setting + proxy: the proxy setting in the form: [:@][:] + Note that compared to other apps, this function currently lacks support + for specifying a protocol schema (i.e. protocol://...). + reactor: reactor to be used to connect to the proxy + kwargs: other args to be passed to HostnameEndpoint Returns: @@ -223,16 +261,43 @@ def _http_proxy_endpoint(proxy, reactor, **kwargs): if proxy is None: return None - # currently we only support hostname:port. Some apps also support - # protocol://[:port], which allows a way of requiring a TLS connection to the - # proxy. - + # Parse the connection string host, port = parse_host_port(proxy, default_port=1080) return HostnameEndpoint(reactor, host, port, **kwargs) -def parse_host_port(hostport, default_port=None): - # could have sworn we had one of these somewhere else... +def parse_username_password(proxy: bytes) -> Tuple[Optional[ProxyCredentials], bytes]: + """ + Parses the username and password from a proxy declaration e.g + username:password@hostname:port. + + Args: + proxy: The proxy connection string. + + Returns + An instance of ProxyCredentials and the proxy connection string with any credentials + stripped, i.e u:p@host:port -> host:port. If no credentials were found, the + ProxyCredentials instance is replaced with None. + """ + if proxy and b"@" in proxy: + # We use rsplit here as the password could contain an @ character + credentials, proxy_without_credentials = proxy.rsplit(b"@", 1) + return ProxyCredentials(credentials), proxy_without_credentials + + return None, proxy + + +def parse_host_port(hostport: bytes, default_port: int = None) -> Tuple[bytes, int]: + """ + Parse the hostname and port from a proxy connection byte string. + + Args: + hostport: The proxy connection string. Must be in the form 'host[:port]'. + default_port: The default port to return if one is not found in `hostport`. + + Returns: + A tuple containing the hostname and port. Uses `default_port` if one was not found. 
+ """ if b":" in hostport: host, port = hostport.rsplit(b":", 1) try: diff --git a/tests/http/test_proxyagent.py b/tests/http/test_proxyagent.py index 505ffcd30..3ea8b5bec 100644 --- a/tests/http/test_proxyagent.py +++ b/tests/http/test_proxyagent.py @@ -12,8 +12,10 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import base64 import logging import os +from typing import Optional from unittest.mock import patch import treq @@ -242,6 +244,21 @@ class MatrixFederationAgentTests(TestCase): @patch.dict(os.environ, {"https_proxy": "proxy.com", "no_proxy": "unused.com"}) def test_https_request_via_proxy(self): + """Tests that TLS-encrypted requests can be made through a proxy""" + self._do_https_request_via_proxy(auth_credentials=None) + + @patch.dict( + os.environ, + {"https_proxy": "bob:pinkponies@proxy.com", "no_proxy": "unused.com"}, + ) + def test_https_request_via_proxy_with_auth(self): + """Tests that authenticated, TLS-encrypted requests can be made through a proxy""" + self._do_https_request_via_proxy(auth_credentials="bob:pinkponies") + + def _do_https_request_via_proxy( + self, + auth_credentials: Optional[str] = None, + ): agent = ProxyAgent( self.reactor, contextFactory=get_test_https_policy(), @@ -278,6 +295,22 @@ class MatrixFederationAgentTests(TestCase): self.assertEqual(request.method, b"CONNECT") self.assertEqual(request.path, b"test.com:443") + # Check whether auth credentials have been supplied to the proxy + proxy_auth_header_values = request.requestHeaders.getRawHeaders( + b"Proxy-Authorization" + ) + + if auth_credentials is not None: + # Compute the correct header value for Proxy-Authorization + encoded_credentials = base64.b64encode(b"bob:pinkponies") + expected_header_value = b"Basic " + encoded_credentials + + # Validate the header's value + self.assertIn(expected_header_value, proxy_auth_header_values) + else: + # Check that the Proxy-Authorization header has not been supplied to the proxy + self.assertIsNone(proxy_auth_header_values) + # tell the proxy server not to close the connection proxy_server.persistent = True @@ -312,6 +345,13 @@ class MatrixFederationAgentTests(TestCase): self.assertEqual(request.method, b"GET") self.assertEqual(request.path, b"/abc") self.assertEqual(request.requestHeaders.getRawHeaders(b"host"), [b"test.com"]) + + # Check that the destination server DID NOT receive proxy credentials + proxy_auth_header_values = request.requestHeaders.getRawHeaders( + b"Proxy-Authorization" + ) + self.assertIsNone(proxy_auth_header_values) + request.write(b"result") request.finish() From b7748d3c00e87df8c49346e67d643916487254e4 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Tue, 23 Mar 2021 07:12:48 -0400 Subject: [PATCH 24/52] Import HomeServer from the proper module. 
(#9665) --- changelog.d/9665.misc | 1 + synapse/crypto/keyring.py | 2 +- synapse/federation/federation_client.py | 2 +- synapse/groups/attestations.py | 2 +- synapse/groups/groups_server.py | 2 +- synapse/handlers/_base.py | 2 +- synapse/handlers/account_data.py | 2 +- synapse/handlers/account_validity.py | 2 +- synapse/handlers/acme.py | 2 +- synapse/handlers/admin.py | 2 +- synapse/handlers/appservice.py | 2 +- synapse/handlers/auth.py | 2 +- synapse/handlers/cas_handler.py | 2 +- synapse/handlers/deactivate_account.py | 2 +- synapse/handlers/device.py | 2 +- synapse/handlers/devicemessage.py | 2 +- synapse/handlers/e2e_keys.py | 2 +- synapse/handlers/e2e_room_keys.py | 2 +- synapse/handlers/groups_local.py | 2 +- synapse/handlers/password_policy.py | 2 +- synapse/handlers/profile.py | 2 +- synapse/handlers/read_marker.py | 2 +- synapse/handlers/receipts.py | 2 +- synapse/handlers/register.py | 2 +- synapse/handlers/room_list.py | 2 +- synapse/handlers/room_member_worker.py | 2 +- synapse/handlers/search.py | 2 +- synapse/handlers/set_password.py | 2 +- synapse/handlers/state_deltas.py | 2 +- synapse/handlers/stats.py | 2 +- synapse/handlers/user_directory.py | 2 +- synapse/http/client.py | 2 +- synapse/push/__init__.py | 2 +- synapse/push/action_generator.py | 2 +- synapse/push/bulk_push_rule_evaluator.py | 2 +- synapse/push/emailpusher.py | 2 +- synapse/push/httppusher.py | 2 +- synapse/push/mailer.py | 2 +- synapse/push/pusher.py | 2 +- synapse/replication/slave/storage/pushers.py | 2 +- synapse/replication/tcp/streams/_base.py | 2 +- synapse/rest/admin/media.py | 2 +- synapse/rest/client/v1/room.py | 2 +- synapse/rest/client/v2_alpha/account.py | 2 +- synapse/rest/client/v2_alpha/groups.py | 2 +- synapse/rest/media/v1/config_resource.py | 2 +- synapse/rest/media/v1/download_resource.py | 2 +- synapse/rest/media/v1/media_repository.py | 2 +- synapse/rest/media/v1/preview_url_resource.py | 2 +- synapse/rest/media/v1/storage_provider.py | 2 +- synapse/rest/media/v1/thumbnail_resource.py | 2 +- synapse/rest/media/v1/upload_resource.py | 2 +- synapse/storage/__init__.py | 2 +- synapse/storage/_base.py | 2 +- synapse/storage/background_updates.py | 2 +- synapse/storage/databases/main/appservice.py | 2 +- synapse/storage/databases/main/pusher.py | 2 +- synapse/storage/purge_events.py | 2 +- synapse/storage/state.py | 2 +- 59 files changed, 59 insertions(+), 58 deletions(-) create mode 100644 changelog.d/9665.misc diff --git a/changelog.d/9665.misc b/changelog.d/9665.misc new file mode 100644 index 000000000..b8bf76c63 --- /dev/null +++ b/changelog.d/9665.misc @@ -0,0 +1 @@ +Import `HomeServer` from the proper module. 
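All of the hunks that follow rely on the standard TYPE_CHECKING guard for breaking import cycles; as a generic reminder of the pattern (a sketch, not code from this patch):

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # Seen only by type checkers such as mypy, so no circular import at runtime
        from synapse.server import HomeServer

    class ExampleHandler:
        def __init__(self, hs: "HomeServer"):  # string annotation: a forward reference
            self._clock = hs.get_clock()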
diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index 902128a23..d5fb51513 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -57,7 +57,7 @@ from synapse.util.metrics import Measure from synapse.util.retryutils import NotRetryingDestination if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index bee81fc01..3b2f51baa 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -62,7 +62,7 @@ from synapse.util.caches.expiringcache import ExpiringCache from synapse.util.retryutils import NotRetryingDestination if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/groups/attestations.py b/synapse/groups/attestations.py index a3f8d92d0..368c44708 100644 --- a/synapse/groups/attestations.py +++ b/synapse/groups/attestations.py @@ -46,7 +46,7 @@ from synapse.metrics.background_process_metrics import run_as_background_process from synapse.types import JsonDict, get_domain_from_id if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index f9a0f4022..4b16a4ac2 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -25,7 +25,7 @@ from synapse.types import GroupID, JsonDict, RoomID, UserID, get_domain_from_id from synapse.util.async_helpers import concurrently_execute if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/handlers/_base.py b/synapse/handlers/_base.py index d29b066a5..aade2c4a3 100644 --- a/synapse/handlers/_base.py +++ b/synapse/handlers/_base.py @@ -24,7 +24,7 @@ from synapse.api.ratelimiting import Ratelimiter from synapse.types import UserID if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/handlers/account_data.py b/synapse/handlers/account_data.py index b1a5df963..1ce6d697e 100644 --- a/synapse/handlers/account_data.py +++ b/synapse/handlers/account_data.py @@ -25,7 +25,7 @@ from synapse.replication.http.account_data import ( from synapse.types import JsonDict, UserID if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer class AccountDataHandler: diff --git a/synapse/handlers/account_validity.py b/synapse/handlers/account_validity.py index 664d09da1..d781bb251 100644 --- a/synapse/handlers/account_validity.py +++ b/synapse/handlers/account_validity.py @@ -27,7 +27,7 @@ from synapse.types import UserID from synapse.util import stringutils if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/handlers/acme.py b/synapse/handlers/acme.py index 132be238d..2a25af628 100644 --- a/synapse/handlers/acme.py +++ b/synapse/handlers/acme.py @@ -24,7 +24,7 @@ from twisted.web.resource import Resource from synapse.app import check_bind_error if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer 
logger = logging.getLogger(__name__) diff --git a/synapse/handlers/admin.py b/synapse/handlers/admin.py index db68c94c5..c494de49a 100644 --- a/synapse/handlers/admin.py +++ b/synapse/handlers/admin.py @@ -25,7 +25,7 @@ from synapse.visibility import filter_events_for_client from ._base import BaseHandler if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index deab8ff2d..996f9e5de 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -38,7 +38,7 @@ from synapse.types import Collection, JsonDict, RoomAlias, RoomStreamToken, User from synapse.util.metrics import Measure if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index badac8c26..d537ea813 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -70,7 +70,7 @@ from synapse.util.msisdn import phone_number_to_msisdn from synapse.util.threepids import canonicalise_email if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/handlers/cas_handler.py b/synapse/handlers/cas_handler.py index cb67589f7..5060936f9 100644 --- a/synapse/handlers/cas_handler.py +++ b/synapse/handlers/cas_handler.py @@ -27,7 +27,7 @@ from synapse.http.site import SynapseRequest from synapse.types import UserID, map_username_to_mxid_localpart if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py index 3886d3124..2bcd8f543 100644 --- a/synapse/handlers/deactivate_account.py +++ b/synapse/handlers/deactivate_account.py @@ -23,7 +23,7 @@ from synapse.types import Requester, UserID, create_requester from ._base import BaseHandler if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index 2fc4951df..54293d0b9 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -45,7 +45,7 @@ from synapse.util.retryutils import NotRetryingDestination from ._base import BaseHandler if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/handlers/devicemessage.py b/synapse/handlers/devicemessage.py index 7db4f4896..eb547743b 100644 --- a/synapse/handlers/devicemessage.py +++ b/synapse/handlers/devicemessage.py @@ -32,7 +32,7 @@ from synapse.util import json_encoder from synapse.util.stringutils import random_string if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py index 9a946a3cf..2ad9b6d93 100644 --- a/synapse/handlers/e2e_keys.py +++ b/synapse/handlers/e2e_keys.py @@ -42,7 +42,7 @@ from synapse.util.caches.expiringcache import ExpiringCache from synapse.util.retryutils import NotRetryingDestination if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server 
import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/handlers/e2e_room_keys.py b/synapse/handlers/e2e_room_keys.py index 622cae23b..a910d246d 100644 --- a/synapse/handlers/e2e_room_keys.py +++ b/synapse/handlers/e2e_room_keys.py @@ -29,7 +29,7 @@ from synapse.types import JsonDict from synapse.util.async_helpers import Linearizer if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/handlers/groups_local.py b/synapse/handlers/groups_local.py index bfb95e3ee..a41ca5df9 100644 --- a/synapse/handlers/groups_local.py +++ b/synapse/handlers/groups_local.py @@ -21,7 +21,7 @@ from synapse.api.errors import HttpResponseException, RequestSendFailed, Synapse from synapse.types import GroupID, JsonDict, get_domain_from_id if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/handlers/password_policy.py b/synapse/handlers/password_policy.py index 6c635cc31..92cefa11a 100644 --- a/synapse/handlers/password_policy.py +++ b/synapse/handlers/password_policy.py @@ -21,7 +21,7 @@ from typing import TYPE_CHECKING from synapse.api.errors import Codes, PasswordRefusedError if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py index dd59392bd..a755363c3 100644 --- a/synapse/handlers/profile.py +++ b/synapse/handlers/profile.py @@ -36,7 +36,7 @@ from synapse.types import ( from ._base import BaseHandler if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/handlers/read_marker.py b/synapse/handlers/read_marker.py index 6bb2fd936..a54fe1968 100644 --- a/synapse/handlers/read_marker.py +++ b/synapse/handlers/read_marker.py @@ -21,7 +21,7 @@ from synapse.util.async_helpers import Linearizer from ._base import BaseHandler if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/handlers/receipts.py b/synapse/handlers/receipts.py index 6a6c52884..dbfe9bfac 100644 --- a/synapse/handlers/receipts.py +++ b/synapse/handlers/receipts.py @@ -20,7 +20,7 @@ from synapse.handlers._base import BaseHandler from synapse.types import JsonDict, ReadReceipt, get_domain_from_id if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index d7f226d58..0fc2bf15d 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -38,7 +38,7 @@ from synapse.types import RoomAlias, UserID, create_requester from ._base import BaseHandler if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py index 8bfc46c65..924b81db7 100644 --- a/synapse/handlers/room_list.py +++ b/synapse/handlers/room_list.py @@ -29,7 +29,7 @@ from synapse.util.caches.response_cache import ResponseCache from ._base import BaseHandler if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import 
HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/handlers/room_member_worker.py b/synapse/handlers/room_member_worker.py index d75506c75..3a90fc0c1 100644 --- a/synapse/handlers/room_member_worker.py +++ b/synapse/handlers/room_member_worker.py @@ -26,7 +26,7 @@ from synapse.replication.http.membership import ( from synapse.types import Requester, UserID if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/handlers/search.py b/synapse/handlers/search.py index 94062e79c..d742dfbd5 100644 --- a/synapse/handlers/search.py +++ b/synapse/handlers/search.py @@ -30,7 +30,7 @@ from synapse.visibility import filter_events_for_client from ._base import BaseHandler if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/handlers/set_password.py b/synapse/handlers/set_password.py index 04e7c64c9..f98a338ec 100644 --- a/synapse/handlers/set_password.py +++ b/synapse/handlers/set_password.py @@ -21,7 +21,7 @@ from synapse.types import Requester from ._base import BaseHandler if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/handlers/state_deltas.py b/synapse/handlers/state_deltas.py index b3f987535..ee8f87e59 100644 --- a/synapse/handlers/state_deltas.py +++ b/synapse/handlers/state_deltas.py @@ -17,7 +17,7 @@ import logging from typing import TYPE_CHECKING, Optional if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/handlers/stats.py b/synapse/handlers/stats.py index 924281144..8730f99d0 100644 --- a/synapse/handlers/stats.py +++ b/synapse/handlers/stats.py @@ -24,7 +24,7 @@ from synapse.metrics.background_process_metrics import run_as_background_process from synapse.types import JsonDict if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/handlers/user_directory.py b/synapse/handlers/user_directory.py index 1a8340000..b121286d9 100644 --- a/synapse/handlers/user_directory.py +++ b/synapse/handlers/user_directory.py @@ -25,7 +25,7 @@ from synapse.types import JsonDict from synapse.util.metrics import Measure if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/http/client.py b/synapse/http/client.py index 1e01e0a9f..a0caba84e 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -77,7 +77,7 @@ from synapse.util import json_decoder from synapse.util.async_helpers import timeout_deferred if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/push/__init__.py b/synapse/push/__init__.py index f4f7ec96f..9fc3da49a 100644 --- a/synapse/push/__init__.py +++ b/synapse/push/__init__.py @@ -21,7 +21,7 @@ import attr from synapse.types import JsonDict, RoomStreamToken if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer @attr.s(slots=True) diff --git a/synapse/push/action_generator.py b/synapse/push/action_generator.py index aaed28650..38a47a600 100644 --- 
a/synapse/push/action_generator.py +++ b/synapse/push/action_generator.py @@ -22,7 +22,7 @@ from synapse.push.bulk_push_rule_evaluator import BulkPushRuleEvaluator from synapse.util.metrics import Measure if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/push/bulk_push_rule_evaluator.py b/synapse/push/bulk_push_rule_evaluator.py index c016a8390..1897f5915 100644 --- a/synapse/push/bulk_push_rule_evaluator.py +++ b/synapse/push/bulk_push_rule_evaluator.py @@ -33,7 +33,7 @@ from synapse.util.caches.lrucache import LruCache from .push_rule_evaluator import PushRuleEvaluatorForEvent if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/push/emailpusher.py b/synapse/push/emailpusher.py index 3dc06a79e..c0968dc7a 100644 --- a/synapse/push/emailpusher.py +++ b/synapse/push/emailpusher.py @@ -24,7 +24,7 @@ from synapse.push import Pusher, PusherConfig, ThrottleParams from synapse.push.mailer import Mailer if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/push/httppusher.py b/synapse/push/httppusher.py index 026134ae2..26af5309c 100644 --- a/synapse/push/httppusher.py +++ b/synapse/push/httppusher.py @@ -31,7 +31,7 @@ from synapse.push import Pusher, PusherConfig, PusherConfigException from . import push_rule_evaluator, push_tools if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/push/mailer.py b/synapse/push/mailer.py index d10201b6b..2e5161de2 100644 --- a/synapse/push/mailer.py +++ b/synapse/push/mailer.py @@ -40,7 +40,7 @@ from synapse.util.async_helpers import concurrently_execute from synapse.visibility import filter_events_for_client if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/push/pusher.py b/synapse/push/pusher.py index 2aa7918fb..cb9412785 100644 --- a/synapse/push/pusher.py +++ b/synapse/push/pusher.py @@ -22,7 +22,7 @@ from synapse.push.httppusher import HttpPusher from synapse.push.mailer import Mailer if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/replication/slave/storage/pushers.py b/synapse/replication/slave/storage/pushers.py index 045bd014d..93161c3df 100644 --- a/synapse/replication/slave/storage/pushers.py +++ b/synapse/replication/slave/storage/pushers.py @@ -24,7 +24,7 @@ from ._base import BaseSlavedStore from ._slaved_id_tracker import SlavedIdTracker if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer class SlavedPusherStore(PusherWorkerStore, BaseSlavedStore): diff --git a/synapse/replication/tcp/streams/_base.py b/synapse/replication/tcp/streams/_base.py index 7e8e64d61..3dfee7674 100644 --- a/synapse/replication/tcp/streams/_base.py +++ b/synapse/replication/tcp/streams/_base.py @@ -33,7 +33,7 @@ import attr from synapse.replication.http.streams import ReplicationGetStreamUpdates if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git 
a/synapse/rest/admin/media.py b/synapse/rest/admin/media.py index 7fcc48a9d..40646ef24 100644 --- a/synapse/rest/admin/media.py +++ b/synapse/rest/admin/media.py @@ -28,7 +28,7 @@ from synapse.rest.admin._base import ( from synapse.types import JsonDict if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py index 6c722d634..525efdf22 100644 --- a/synapse/rest/client/v1/room.py +++ b/synapse/rest/client/v1/room.py @@ -58,7 +58,7 @@ from synapse.util import json_decoder from synapse.util.stringutils import parse_and_validate_server_name, random_string if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py index adf1d3972..c2ba790ba 100644 --- a/synapse/rest/client/v2_alpha/account.py +++ b/synapse/rest/client/v2_alpha/account.py @@ -45,7 +45,7 @@ from synapse.util.threepids import canonicalise_email, check_3pid_allowed from ._base import client_patterns, interactive_auth_handler if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/rest/client/v2_alpha/groups.py b/synapse/rest/client/v2_alpha/groups.py index 5901432fa..08fb6b2b0 100644 --- a/synapse/rest/client/v2_alpha/groups.py +++ b/synapse/rest/client/v2_alpha/groups.py @@ -38,7 +38,7 @@ from synapse.types import GroupID, JsonDict from ._base import client_patterns if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/rest/media/v1/config_resource.py b/synapse/rest/media/v1/config_resource.py index 1eff98ef1..c41a7ab41 100644 --- a/synapse/rest/media/v1/config_resource.py +++ b/synapse/rest/media/v1/config_resource.py @@ -23,7 +23,7 @@ from synapse.http.server import DirectServeJsonResource, respond_with_json from synapse.http.site import SynapseRequest if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer class MediaConfigResource(DirectServeJsonResource): diff --git a/synapse/rest/media/v1/download_resource.py b/synapse/rest/media/v1/download_resource.py index 8a43581f1..5dadaeaf5 100644 --- a/synapse/rest/media/v1/download_resource.py +++ b/synapse/rest/media/v1/download_resource.py @@ -24,8 +24,8 @@ from synapse.http.servlet import parse_boolean from ._base import parse_media_id, respond_404 if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer from synapse.rest.media.v1.media_repository import MediaRepository + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py index 8b4841ed5..0c041b542 100644 --- a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py @@ -58,7 +58,7 @@ from .thumbnailer import Thumbnailer, ThumbnailError from .upload_resource import UploadResource if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py index b8895aeaa..e590a0dea 100644 --- 
a/synapse/rest/media/v1/preview_url_resource.py +++ b/synapse/rest/media/v1/preview_url_resource.py @@ -54,8 +54,8 @@ from ._base import FileInfo if TYPE_CHECKING: from lxml import etree - from synapse.app.homeserver import HomeServer from synapse.rest.media.v1.media_repository import MediaRepository + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/rest/media/v1/storage_provider.py b/synapse/rest/media/v1/storage_provider.py index e92006faa..031947557 100644 --- a/synapse/rest/media/v1/storage_provider.py +++ b/synapse/rest/media/v1/storage_provider.py @@ -29,7 +29,7 @@ from .media_storage import FileResponder logger = logging.getLogger(__name__) if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer class StorageProvider(metaclass=abc.ABCMeta): diff --git a/synapse/rest/media/v1/thumbnail_resource.py b/synapse/rest/media/v1/thumbnail_resource.py index fbcd50f1e..af802bc0b 100644 --- a/synapse/rest/media/v1/thumbnail_resource.py +++ b/synapse/rest/media/v1/thumbnail_resource.py @@ -34,8 +34,8 @@ from ._base import ( ) if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer from synapse.rest.media.v1.media_repository import MediaRepository + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/rest/media/v1/upload_resource.py b/synapse/rest/media/v1/upload_resource.py index ae5aef2f7..0138b2e2d 100644 --- a/synapse/rest/media/v1/upload_resource.py +++ b/synapse/rest/media/v1/upload_resource.py @@ -26,8 +26,8 @@ from synapse.http.site import SynapseRequest from synapse.rest.media.v1.media_storage import SpamMediaException if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer from synapse.rest.media.v1.media_repository import MediaRepository + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index a3c52695e..0b9007e51 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -36,7 +36,7 @@ from synapse.storage.purge_events import PurgeEventsStorage from synapse.storage.state import StateGroupStorage if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer __all__ = ["Databases", "DataStore"] diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index a25c4093b..240905329 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -27,7 +27,7 @@ from synapse.types import Collection, StreamToken, get_domain_from_id from synapse.util import json_decoder if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/storage/background_updates.py b/synapse/storage/background_updates.py index 329660cf0..ccb06aab3 100644 --- a/synapse/storage/background_updates.py +++ b/synapse/storage/background_updates.py @@ -23,7 +23,7 @@ from synapse.util import json_encoder from . 
import engines if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer from synapse.storage.database import DatabasePool, LoggingTransaction logger = logging.getLogger(__name__) diff --git a/synapse/storage/databases/main/appservice.py b/synapse/storage/databases/main/appservice.py index 03a38422a..85bb853d3 100644 --- a/synapse/storage/databases/main/appservice.py +++ b/synapse/storage/databases/main/appservice.py @@ -32,7 +32,7 @@ from synapse.types import JsonDict from synapse.util import json_encoder if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/storage/databases/main/pusher.py b/synapse/storage/databases/main/pusher.py index 85f1ebac9..c65558c28 100644 --- a/synapse/storage/databases/main/pusher.py +++ b/synapse/storage/databases/main/pusher.py @@ -27,7 +27,7 @@ from synapse.util import json_encoder from synapse.util.caches.descriptors import cached, cachedList if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/storage/purge_events.py b/synapse/storage/purge_events.py index 4dcd848c5..ad954990a 100644 --- a/synapse/storage/purge_events.py +++ b/synapse/storage/purge_events.py @@ -20,7 +20,7 @@ from typing import TYPE_CHECKING, Set from synapse.storage.databases import Databases if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer logger = logging.getLogger(__name__) diff --git a/synapse/storage/state.py b/synapse/storage/state.py index d179a4188..aa25bd835 100644 --- a/synapse/storage/state.py +++ b/synapse/storage/state.py @@ -32,7 +32,7 @@ from synapse.events import EventBase from synapse.types import MutableStateMap, StateMap if TYPE_CHECKING: - from synapse.app.homeserver import HomeServer + from synapse.server import HomeServer from synapse.storage.databases import Databases logger = logging.getLogger(__name__) From 4ecba9bd5cb9c094da864d05e3a10456c30bc409 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Tue, 23 Mar 2021 11:51:12 +0000 Subject: [PATCH 25/52] Federation API for Space summary (#9652) Builds on the work done in #9643 to add a federation API for space summaries. There's a bit of refactoring of the existing client-server code first, to avoid too much duplication. --- changelog.d/9652.feature | 1 + synapse/federation/transport/server.py | 67 +++++++-- synapse/handlers/space_summary.py | 189 ++++++++++++++++++------- 3 files changed, 200 insertions(+), 57 deletions(-) create mode 100644 changelog.d/9652.feature diff --git a/changelog.d/9652.feature b/changelog.d/9652.feature new file mode 100644 index 000000000..2f7ccedcf --- /dev/null +++ b/changelog.d/9652.feature @@ -0,0 +1 @@ +Add initial experimental support for a "space summary" API. 
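For orientation: the new servlet below accepts a POST body whose fields are validated one by one in on_POST. A hand-written sketch of such a body, expressed as a Python literal (values are illustrative):

    # POST /_matrix/federation/unstable/org.matrix.msc2946/spaces/{room_id}
    request_body = {
        "suggested_only": True,                   # must be a boolean
        "exclude_rooms": ["!abc:example.com"],    # rooms the calling server has already seen
        "max_rooms_per_space": 10,                # clipped to MAX_ROOMS_PER_SPACE server-side
    }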
diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index 2cf935f38..84e39c5a4 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -18,7 +18,7 @@ import functools import logging import re -from typing import Optional, Tuple, Type +from typing import Container, Mapping, Optional, Sequence, Tuple, Type import synapse from synapse.api.constants import MAX_GROUP_CATEGORYID_LENGTH, MAX_GROUP_ROLEID_LENGTH @@ -29,7 +29,7 @@ from synapse.api.urls import ( FEDERATION_V1_PREFIX, FEDERATION_V2_PREFIX, ) -from synapse.http.server import JsonResource +from synapse.http.server import HttpServer, JsonResource from synapse.http.servlet import ( parse_boolean_from_args, parse_integer_from_args, @@ -44,7 +44,8 @@ from synapse.logging.opentracing import ( whitelisted_homeserver, ) from synapse.server import HomeServer -from synapse.types import ThirdPartyInstanceID, get_domain_from_id +from synapse.types import JsonDict, ThirdPartyInstanceID, get_domain_from_id +from synapse.util.ratelimitutils import FederationRateLimiter from synapse.util.stringutils import parse_and_validate_server_name from synapse.util.versionstring import get_version_string @@ -1376,6 +1377,40 @@ class FederationGroupsSettingJoinPolicyServlet(BaseFederationServlet): return 200, new_content +class FederationSpaceSummaryServlet(BaseFederationServlet): + PREFIX = FEDERATION_UNSTABLE_PREFIX + "/org.matrix.msc2946" + PATH = "/spaces/(?P<room_id>[^/]*)" + + async def on_POST( + self, + origin: str, + content: JsonDict, + query: Mapping[bytes, Sequence[bytes]], + room_id: str, + ) -> Tuple[int, JsonDict]: + suggested_only = content.get("suggested_only", False) + if not isinstance(suggested_only, bool): + raise SynapseError( + 400, "'suggested_only' must be a boolean", Codes.BAD_JSON + ) + + exclude_rooms = content.get("exclude_rooms", []) + if not isinstance(exclude_rooms, list) or any( + not isinstance(x, str) for x in exclude_rooms + ): + raise SynapseError(400, "bad value for 'exclude_rooms'", Codes.BAD_JSON) + + max_rooms_per_space = content.get("max_rooms_per_space") + if max_rooms_per_space is not None and not isinstance(max_rooms_per_space, int): + raise SynapseError( + 400, "bad value for 'max_rooms_per_space'", Codes.BAD_JSON + ) + + return 200, await self.handler.federation_space_summary( + room_id, suggested_only, max_rooms_per_space, exclude_rooms + ) + + class RoomComplexityServlet(BaseFederationServlet): """ Indicates to other servers how complex (and therefore likely @@ -1474,18 +1509,24 @@ DEFAULT_SERVLET_GROUPS = ( ) -def register_servlets(hs, resource, authenticator, ratelimiter, servlet_groups=None): +def register_servlets( + hs: HomeServer, + resource: HttpServer, + authenticator: Authenticator, + ratelimiter: FederationRateLimiter, + servlet_groups: Optional[Container[str]] = None, +): """Initialize and register servlet classes. Will by default register all servlets. For custom behaviour, pass in a list of servlet_groups to register. Args: - hs (synapse.server.HomeServer): homeserver - resource (JsonResource): resource class to register to - authenticator (Authenticator): authenticator to use - ratelimiter (util.ratelimitutils.FederationRateLimiter): ratelimiter to use - servlet_groups (list[str], optional): List of servlet groups to register. + hs: homeserver + resource: resource class to register to + authenticator: authenticator to use + ratelimiter: ratelimiter to use + servlet_groups: List of servlet groups to register.
Defaults to ``DEFAULT_SERVLET_GROUPS``. """ if not servlet_groups: @@ -1500,6 +1541,14 @@ def register_servlets(hs, resource, authenticator, ratelimiter, servlet_groups=N server_name=hs.hostname, ).register(resource) + if hs.config.experimental.spaces_enabled: + FederationSpaceSummaryServlet( + handler=hs.get_space_summary_handler(), + authenticator=authenticator, + ratelimiter=ratelimiter, + server_name=hs.hostname, + ).register(resource) + if "openid" in servlet_groups: for servletclass in OPENID_SERVLET_CLASSES: servletclass( diff --git a/synapse/handlers/space_summary.py b/synapse/handlers/space_summary.py index 513dc0c71..f5ead9447 100644 --- a/synapse/handlers/space_summary.py +++ b/synapse/handlers/space_summary.py @@ -16,7 +16,9 @@ import itertools import logging from collections import deque -from typing import TYPE_CHECKING, Iterable, List, Optional, Set +from typing import TYPE_CHECKING, Iterable, List, Optional, Sequence, Set, Tuple + +import attr from synapse.api.constants import EventContentFields, EventTypes, HistoryVisibility from synapse.api.errors import AuthError @@ -54,7 +56,7 @@ class SpaceSummaryHandler: max_rooms_per_space: Optional[int] = None, ) -> JsonDict: """ - Implementation of the space summary API + Implementation of the space summary C-S API Args: requester: user id of the user making this request @@ -66,7 +68,7 @@ class SpaceSummaryHandler: max_rooms_per_space: an optional limit on the number of child rooms we will return. This does not apply to the root room (ie, room_id), and - is overridden by ROOMS_PER_SPACE_LIMIT. + is overridden by MAX_ROOMS_PER_SPACE. Returns: summary dict to return @@ -76,68 +78,154 @@ class SpaceSummaryHandler: await self._auth.check_user_in_room_or_world_readable(room_id, requester) # the queue of rooms to process - room_queue = deque((room_id,)) + room_queue = deque((_RoomQueueEntry(room_id),)) processed_rooms = set() # type: Set[str] rooms_result = [] # type: List[JsonDict] events_result = [] # type: List[JsonDict] - now = self._clock.time_msec() + while room_queue and len(rooms_result) < MAX_ROOMS: + queue_entry = room_queue.popleft() + room_id = queue_entry.room_id + logger.debug("Processing room %s", room_id) + processed_rooms.add(room_id) + + # The client-specified max_rooms_per_space limit doesn't apply to the + # room_id specified in the request, so we ignore it if this is the + # first room we are processing. + max_children = max_rooms_per_space if processed_rooms else None + + rooms, events = await self._summarize_local_room( + requester, room_id, suggested_only, max_children + ) + + rooms_result.extend(rooms) + events_result.extend(events) + + # add any children that we haven't already processed to the queue + for edge_event in events: + if edge_event["state_key"] not in processed_rooms: + room_queue.append(_RoomQueueEntry(edge_event["state_key"])) + + return {"rooms": rooms_result, "events": events_result} + + async def federation_space_summary( + self, + room_id: str, + suggested_only: bool, + max_rooms_per_space: Optional[int], + exclude_rooms: Iterable[str], + ) -> JsonDict: + """ + Implementation of the space summary Federation API + + Args: + room_id: room id to start the summary at + + suggested_only: whether we should only return children with the "suggested" + flag set. + + max_rooms_per_space: an optional limit on the number of child rooms we will + return. Unlike the C-S API, this applies to the root room (room_id). + It is clipped to MAX_ROOMS_PER_SPACE. 
+ + exclude_rooms: a list of rooms to skip over (presumably because the + calling server has already seen them). + + Returns: + summary dict to return + """ + # the queue of rooms to process + room_queue = deque((room_id,)) + + # the set of rooms that we should not walk further. Initialise it with the + # excluded-rooms list; we will add other rooms as we process them so that + # we do not loop. + processed_rooms = set(exclude_rooms) # type: Set[str] + + rooms_result = [] # type: List[JsonDict] + events_result = [] # type: List[JsonDict] while room_queue and len(rooms_result) < MAX_ROOMS: room_id = room_queue.popleft() logger.debug("Processing room %s", room_id) processed_rooms.add(room_id) - try: - await self._auth.check_user_in_room_or_world_readable( - room_id, requester - ) - except AuthError: - logger.info( - "user %s cannot view room %s, omitting from summary", - requester, - room_id, - ) - continue + rooms, events = await self._summarize_local_room( + None, room_id, suggested_only, max_rooms_per_space + ) - room_entry = await self._build_room_entry(room_id) - rooms_result.append(room_entry) + rooms_result.extend(rooms) + events_result.extend(events) - # look for child rooms/spaces. - child_events = await self._get_child_events(room_id) - - if suggested_only: - # we only care about suggested children - child_events = filter(_is_suggested_child_event, child_events) - - # The client-specified max_rooms_per_space limit doesn't apply to the - # room_id specified in the request, so we ignore it if this is the - # first room we are processing. Otherwise, apply any client-specified - # limit, capping to our built-in limit. - if max_rooms_per_space is not None and len(processed_rooms) > 1: - max_rooms = min(MAX_ROOMS_PER_SPACE, max_rooms_per_space) - else: - max_rooms = MAX_ROOMS_PER_SPACE - - for edge_event in itertools.islice(child_events, max_rooms): - edge_room_id = edge_event.state_key - - events_result.append( - await self._event_serializer.serialize_event( - edge_event, - time_now=now, - event_format=format_event_for_client_v2, - ) - ) - - # if we haven't yet visited the target of this link, add it to the queue - if edge_room_id not in processed_rooms: - room_queue.append(edge_room_id) + # add any children that we haven't already processed to the queue + for edge_event in events: + if edge_event["state_key"] not in processed_rooms: + room_queue.append(edge_event["state_key"]) return {"rooms": rooms_result, "events": events_result} + async def _summarize_local_room( + self, + requester: Optional[str], + room_id: str, + suggested_only: bool, + max_children: Optional[int], + ) -> Tuple[Sequence[JsonDict], Sequence[JsonDict]]: + if not await self._is_room_accessible(room_id, requester): + return (), () + + room_entry = await self._build_room_entry(room_id) + + # look for child rooms/spaces. 
+        child_events = await self._get_child_events(room_id)
+
+        if suggested_only:
+            # we only care about suggested children
+            child_events = filter(_is_suggested_child_event, child_events)
+
+        if max_children is None or max_children > MAX_ROOMS_PER_SPACE:
+            max_children = MAX_ROOMS_PER_SPACE
+
+        now = self._clock.time_msec()
+        events_result = []  # type: List[JsonDict]
+        for edge_event in itertools.islice(child_events, max_children):
+            events_result.append(
+                await self._event_serializer.serialize_event(
+                    edge_event,
+                    time_now=now,
+                    event_format=format_event_for_client_v2,
+                )
+            )
+        return (room_entry,), events_result
+
+    async def _is_room_accessible(self, room_id: str, requester: Optional[str]) -> bool:
+        # if we have an authenticated requesting user, first check if they are in the
+        # room
+        if requester:
+            try:
+                await self._auth.check_user_in_room(room_id, requester)
+                return True
+            except AuthError:
+                pass
+
+        # otherwise, check if the room is peekable
+        hist_vis_ev = await self._state_handler.get_current_state(
+            room_id, EventTypes.RoomHistoryVisibility, ""
+        )
+        if hist_vis_ev:
+            hist_vis = hist_vis_ev.content.get("history_visibility")
+            if hist_vis == HistoryVisibility.WORLD_READABLE:
+                return True
+
+        logger.info(
+            "room %s is unpeekable and user %s is not a member, omitting from summary",
+            room_id,
+            requester,
+        )
+        return False
+
     async def _build_room_entry(self, room_id: str) -> JsonDict:
         """Generate an entry suitable for the 'rooms' list in the summary response"""
         stats = await self._store.get_room_with_stats(room_id)
@@ -191,6 +279,11 @@ class SpaceSummaryHandler:
         return (e for e in events if e.content.get("via"))
 
 
+@attr.s(frozen=True, slots=True)
+class _RoomQueueEntry:
+    room_id = attr.ib(type=str)
+
+
 def _is_suggested_child_event(edge_event: EventBase) -> bool:
     suggested = edge_event.content.get("suggested")
     if isinstance(suggested, bool) and suggested:

From 0caf2a338eacda084454bae84514875af6349eeb Mon Sep 17 00:00:00 2001
From: Jonathan de Jong
Date: Tue, 23 Mar 2021 14:52:30 +0100
Subject: [PATCH 26/52] Fix federation stall on concurrent access errors
 (#9639)

---
 changelog.d/9639.bugfix                       |  1 +
 .../storage/databases/main/transactions.py    | 45 ++++---------
 2 files changed, 10 insertions(+), 36 deletions(-)
 create mode 100644 changelog.d/9639.bugfix

diff --git a/changelog.d/9639.bugfix b/changelog.d/9639.bugfix
new file mode 100644
index 000000000..51b374670
--- /dev/null
+++ b/changelog.d/9639.bugfix
@@ -0,0 +1 @@
+Fix bug where federation sending can stall due to `concurrent access` database exceptions when it falls behind.
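The diff that follows replaces a hand-rolled, per-engine `INSERT ... ON CONFLICT DO NOTHING` batch with two calls to `simple_upsert_many`, the database pool's generic upsert helper, so these writes go through the pool's standard machinery for coping with concurrent writers rather than a bespoke transaction. For readers unfamiliar with upsert semantics, here is a minimal, self-contained sketch using Python's bundled sqlite3 module (requires SQLite 3.24+); the table layout and rows are illustrative only, not Synapse's actual schema handling:

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute(
    "CREATE TABLE destination_rooms ("
    " destination TEXT, room_id TEXT, stream_ordering INTEGER,"
    " PRIMARY KEY (destination, room_id))"
)

# "Upsert": insert the row, or update stream_ordering if the key already
# exists. This is roughly what simple_upsert_many provides per row.
upsert = (
    "INSERT INTO destination_rooms (destination, room_id, stream_ordering)"
    " VALUES (?, ?, ?)"
    " ON CONFLICT (destination, room_id)"
    " DO UPDATE SET stream_ordering = excluded.stream_ordering"
)
conn.execute(upsert, ("remote.example.org", "!room:example.org", 5))
conn.execute(upsert, ("remote.example.org", "!room:example.org", 6))  # no error
assert conn.execute("SELECT stream_ordering FROM destination_rooms").fetchone() == (6,)
```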
diff --git a/synapse/storage/databases/main/transactions.py b/synapse/storage/databases/main/transactions.py index 030966184..b7072f1f5 100644 --- a/synapse/storage/databases/main/transactions.py +++ b/synapse/storage/databases/main/transactions.py @@ -22,7 +22,6 @@ from canonicaljson import encode_canonical_json from synapse.metrics.background_process_metrics import wrap_as_background_process from synapse.storage._base import SQLBaseStore, db_to_json from synapse.storage.database import DatabasePool, LoggingTransaction -from synapse.storage.engines import PostgresEngine, Sqlite3Engine from synapse.types import JsonDict from synapse.util.caches.expiringcache import ExpiringCache @@ -312,49 +311,23 @@ class TransactionStore(TransactionWorkerStore): stream_ordering: the stream_ordering of the event """ - return await self.db_pool.runInteraction( - "store_destination_rooms_entries", - self._store_destination_rooms_entries_txn, - destinations, - room_id, - stream_ordering, + await self.db_pool.simple_upsert_many( + table="destinations", + key_names=("destination",), + key_values=[(d,) for d in destinations], + value_names=[], + value_values=[], + desc="store_destination_rooms_entries_dests", ) - def _store_destination_rooms_entries_txn( - self, - txn: LoggingTransaction, - destinations: Iterable[str], - room_id: str, - stream_ordering: int, - ) -> None: - - # ensure we have a `destinations` row for this destination, as there is - # a foreign key constraint. - if isinstance(self.database_engine, PostgresEngine): - q = """ - INSERT INTO destinations (destination) - VALUES (?) - ON CONFLICT DO NOTHING; - """ - elif isinstance(self.database_engine, Sqlite3Engine): - q = """ - INSERT OR IGNORE INTO destinations (destination) - VALUES (?); - """ - else: - raise RuntimeError("Unknown database engine") - - txn.execute_batch(q, ((destination,) for destination in destinations)) - rows = [(destination, room_id) for destination in destinations] - - self.db_pool.simple_upsert_many_txn( - txn, + await self.db_pool.simple_upsert_many( table="destination_rooms", key_names=("destination", "room_id"), key_values=rows, value_names=["stream_ordering"], value_values=[(stream_ordering,)] * len(rows), + desc="store_destination_rooms_entries_rooms", ) async def get_destination_last_successful_stream_ordering( From e550ab17adc8dd3c48daf7fedcd09418a73f524b Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 23 Mar 2021 14:52:20 +0000 Subject: [PATCH 27/52] Increase default join burst ratelimiting (#9674) It's legitimate behaviour to try and join a bunch of rooms at once. --- changelog.d/9674.misc | 1 + docs/sample_config.yaml | 4 ++-- synapse/config/ratelimiting.py | 8 ++++---- 3 files changed, 7 insertions(+), 6 deletions(-) create mode 100644 changelog.d/9674.misc diff --git a/changelog.d/9674.misc b/changelog.d/9674.misc new file mode 100644 index 000000000..c82fde61b --- /dev/null +++ b/changelog.d/9674.misc @@ -0,0 +1 @@ +Increase default join ratelimiting burst rate. 
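The `per_second`/`burst_count` pair changed below behaves like a token bucket: the bucket holds `burst_count` tokens and refills at `per_second`, so raising the burst to 10 lets a client join ten rooms back-to-back before the steady-state rate applies. A toy model of that behaviour (an illustration only, not Synapse's actual `Ratelimiter` implementation):

```python
import time

class TokenBucket:
    """Toy limiter: holds `burst_count` tokens, refilled at `per_second`."""

    def __init__(self, per_second: float, burst_count: int):
        self.rate = per_second
        self.capacity = burst_count
        self.tokens = float(burst_count)
        self.last = time.monotonic()

    def allow(self) -> bool:
        # Refill tokens for the time elapsed, capped at the burst capacity.
        now = time.monotonic()
        self.tokens = min(self.capacity, self.tokens + (now - self.last) * self.rate)
        self.last = now
        if self.tokens >= 1:
            self.tokens -= 1
            return True
        return False

limiter = TokenBucket(per_second=0.1, burst_count=10)
assert all(limiter.allow() for _ in range(10))  # a burst of 10 joins is fine
assert not limiter.allow()  # the 11th must wait for the bucket to refill
```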
diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml
index a9f59e39f..07a928224 100644
--- a/docs/sample_config.yaml
+++ b/docs/sample_config.yaml
@@ -869,10 +869,10 @@ log_config: "CONFDIR/SERVERNAME.log.config"
 #rc_joins:
 #  local:
 #    per_second: 0.1
-#    burst_count: 3
+#    burst_count: 10
 #  remote:
 #    per_second: 0.01
-#    burst_count: 3
+#    burst_count: 10
 #
 #rc_3pid_validation:
 #  per_second: 0.003
diff --git a/synapse/config/ratelimiting.py b/synapse/config/ratelimiting.py
index 847d25122..3f3997f4e 100644
--- a/synapse/config/ratelimiting.py
+++ b/synapse/config/ratelimiting.py
@@ -95,11 +95,11 @@ class RatelimitConfig(Config):
 
         self.rc_joins_local = RateLimitConfig(
             config.get("rc_joins", {}).get("local", {}),
-            defaults={"per_second": 0.1, "burst_count": 3},
+            defaults={"per_second": 0.1, "burst_count": 10},
         )
         self.rc_joins_remote = RateLimitConfig(
             config.get("rc_joins", {}).get("remote", {}),
-            defaults={"per_second": 0.01, "burst_count": 3},
+            defaults={"per_second": 0.01, "burst_count": 10},
         )
 
         # Ratelimit cross-user key requests:
@@ -187,10 +187,10 @@ class RatelimitConfig(Config):
         #rc_joins:
         #  local:
         #    per_second: 0.1
-        #    burst_count: 3
+        #    burst_count: 10
         #  remote:
         #    per_second: 0.01
-        #    burst_count: 3
+        #    burst_count: 10
         #
         #rc_3pid_validation:
         #  per_second: 0.003

From 7e8dc9934e29ebd5f30f42b4b6e6b4491569373a Mon Sep 17 00:00:00 2001
From: Patrick Cloke
Date: Wed, 24 Mar 2021 06:48:46 -0400
Subject: [PATCH 28/52] Add type hints for service notices to the HomeServer
 object. (#9675)

---
 changelog.d/9675.misc                         |  1 +
 synapse/handlers/sync.py                      |  6 ++++--
 synapse/rest/client/v2_alpha/sync.py          | 11 ++++++++---
 synapse/server.py                             |  4 ++--
 .../server_notices/consent_server_notices.py  | 18 ++++++++++--------
 .../resource_limits_server_notices.py         | 11 +++++------
 .../server_notices/server_notices_manager.py  |  2 +-
 .../server_notices/server_notices_sender.py   | 18 ++++++++++--------
 .../worker_server_notices_sender.py           | 11 ++++++-----
 synapse/storage/databases/main/deviceinbox.py |  6 +++---
 .../databases/main/monthly_active_users.py    |  4 ++--
 11 files changed, 52 insertions(+), 40 deletions(-)
 create mode 100644 changelog.d/9675.misc

diff --git a/changelog.d/9675.misc b/changelog.d/9675.misc
new file mode 100644
index 000000000..35338cd33
--- /dev/null
+++ b/changelog.d/9675.misc
@@ -0,0 +1 @@
+Add additional type hints to the Homeserver object.
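A recurring pattern in this patch (and its neighbours) is the `TYPE_CHECKING` guard: `HomeServer` imports most of the codebase, so importing it at runtime from the modules it constructs would create an import cycle. The guard makes the import visible to mypy only, with the annotation written as a string forward reference. A minimal sketch of the pattern (the handler class here is hypothetical):

```python
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Only evaluated by the type checker, so no circular import at runtime.
    from synapse.server import HomeServer

class ExampleHandler:
    def __init__(self, hs: "HomeServer"):  # string annotation, resolved lazily
        self._store = hs.get_datastore()
```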
diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 7b723ead5..ee607e6e6 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -80,7 +80,7 @@ class SyncConfig: filter_collection = attr.ib(type=FilterCollection) is_guest = attr.ib(type=bool) request_key = attr.ib(type=Tuple[Any, ...]) - device_id = attr.ib(type=str) + device_id = attr.ib(type=Optional[str]) @attr.s(slots=True, frozen=True) @@ -723,7 +723,9 @@ class SyncHandler: return summary - def get_lazy_loaded_members_cache(self, cache_key: Tuple[str, str]) -> LruCache: + def get_lazy_loaded_members_cache( + self, cache_key: Tuple[str, Optional[str]] + ) -> LruCache: cache = self.lazy_loaded_members_cache.get(cache_key) if cache is None: logger.debug("creating LruCache for %r", cache_key) diff --git a/synapse/rest/client/v2_alpha/sync.py b/synapse/rest/client/v2_alpha/sync.py index 8e52e4cca..a0db0a054 100644 --- a/synapse/rest/client/v2_alpha/sync.py +++ b/synapse/rest/client/v2_alpha/sync.py @@ -15,6 +15,7 @@ import itertools import logging +from typing import TYPE_CHECKING, Tuple from synapse.api.constants import PresenceState from synapse.api.errors import Codes, StoreError, SynapseError @@ -26,11 +27,15 @@ from synapse.events.utils import ( from synapse.handlers.presence import format_user_presence_state from synapse.handlers.sync import SyncConfig from synapse.http.servlet import RestServlet, parse_boolean, parse_integer, parse_string -from synapse.types import StreamToken +from synapse.http.site import SynapseRequest +from synapse.types import JsonDict, StreamToken from synapse.util import json_decoder from ._base import client_patterns, set_timeline_upper_limit +if TYPE_CHECKING: + from synapse.server import HomeServer + logger = logging.getLogger(__name__) @@ -73,7 +78,7 @@ class SyncRestServlet(RestServlet): PATTERNS = client_patterns("/sync$") ALLOWED_PRESENCE = {"online", "offline", "unavailable"} - def __init__(self, hs): + def __init__(self, hs: "HomeServer"): super().__init__() self.hs = hs self.auth = hs.get_auth() @@ -85,7 +90,7 @@ class SyncRestServlet(RestServlet): self._server_notices_sender = hs.get_server_notices_sender() self._event_serializer = hs.get_event_client_serializer() - async def on_GET(self, request): + async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: if b"from" in request.args: # /events used to use 'from', but /sync uses 'since'. # Lets be helpful and whine if we see a 'from'. diff --git a/synapse/server.py b/synapse/server.py index 98822d8e2..5e787e228 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -650,13 +650,13 @@ class HomeServer(metaclass=abc.ABCMeta): return FederationHandlerRegistry(self) @cache_in_self - def get_server_notices_manager(self): + def get_server_notices_manager(self) -> ServerNoticesManager: if self.config.worker_app: raise Exception("Workers cannot send server notices") return ServerNoticesManager(self) @cache_in_self - def get_server_notices_sender(self): + def get_server_notices_sender(self) -> WorkerServerNoticesSender: if self.config.worker_app: return WorkerServerNoticesSender(self) return ServerNoticesSender(self) diff --git a/synapse/server_notices/consent_server_notices.py b/synapse/server_notices/consent_server_notices.py index 9137c4edb..a9349bf9a 100644 --- a/synapse/server_notices/consent_server_notices.py +++ b/synapse/server_notices/consent_server_notices.py @@ -13,13 +13,16 @@ # See the License for the specific language governing permissions and # limitations under the License. 
import logging -from typing import Any +from typing import TYPE_CHECKING, Any, Set from synapse.api.errors import SynapseError from synapse.api.urls import ConsentURIBuilder from synapse.config import ConfigError from synapse.types import get_localpart_from_id +if TYPE_CHECKING: + from synapse.server import HomeServer + logger = logging.getLogger(__name__) @@ -28,16 +31,11 @@ class ConsentServerNotices: privacy policy consent, and sends one if we do. """ - def __init__(self, hs): - """ - - Args: - hs (synapse.server.HomeServer): - """ + def __init__(self, hs: "HomeServer"): self._server_notices_manager = hs.get_server_notices_manager() self._store = hs.get_datastore() - self._users_in_progress = set() + self._users_in_progress = set() # type: Set[str] self._current_consent_version = hs.config.user_consent_version self._server_notice_content = hs.config.user_consent_server_notice_content @@ -73,6 +71,10 @@ class ConsentServerNotices: try: u = await self._store.get_user_by_id(user_id) + # The user doesn't exist. + if u is None: + return + if u["is_guest"] and not self._send_to_guests: # don't send to guests return diff --git a/synapse/server_notices/resource_limits_server_notices.py b/synapse/server_notices/resource_limits_server_notices.py index 665245134..a18a2e76c 100644 --- a/synapse/server_notices/resource_limits_server_notices.py +++ b/synapse/server_notices/resource_limits_server_notices.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. import logging -from typing import List, Tuple +from typing import TYPE_CHECKING, List, Tuple from synapse.api.constants import ( EventTypes, @@ -24,6 +24,9 @@ from synapse.api.constants import ( from synapse.api.errors import AuthError, ResourceLimitError, SynapseError from synapse.server_notices.server_notices_manager import SERVER_NOTICE_ROOM_TAG +if TYPE_CHECKING: + from synapse.server import HomeServer + logger = logging.getLogger(__name__) @@ -32,11 +35,7 @@ class ResourceLimitsServerNotices: ensures that the client is kept up to date. """ - def __init__(self, hs): - """ - Args: - hs (synapse.server.HomeServer): - """ + def __init__(self, hs: "HomeServer"): self._server_notices_manager = hs.get_server_notices_manager() self._store = hs.get_datastore() self._auth = hs.get_auth() diff --git a/synapse/server_notices/server_notices_manager.py b/synapse/server_notices/server_notices_manager.py index c46b2f047..144e1da78 100644 --- a/synapse/server_notices/server_notices_manager.py +++ b/synapse/server_notices/server_notices_manager.py @@ -58,7 +58,7 @@ class ServerNoticesManager: user_id: str, event_content: dict, type: str = EventTypes.Message, - state_key: Optional[bool] = None, + state_key: Optional[str] = None, ) -> EventBase: """Send a notice to the given user diff --git a/synapse/server_notices/server_notices_sender.py b/synapse/server_notices/server_notices_sender.py index 6870b67ca..965c64588 100644 --- a/synapse/server_notices/server_notices_sender.py +++ b/synapse/server_notices/server_notices_sender.py @@ -12,25 +12,27 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import Iterable, Union +from typing import TYPE_CHECKING, Iterable, Union from synapse.server_notices.consent_server_notices import ConsentServerNotices from synapse.server_notices.resource_limits_server_notices import ( ResourceLimitsServerNotices, ) +from synapse.server_notices.worker_server_notices_sender import ( + WorkerServerNoticesSender, +) + +if TYPE_CHECKING: + from synapse.server import HomeServer -class ServerNoticesSender: +class ServerNoticesSender(WorkerServerNoticesSender): """A centralised place which sends server notices automatically when Certain Events take place """ - def __init__(self, hs): - """ - - Args: - hs (synapse.server.HomeServer): - """ + def __init__(self, hs: "HomeServer"): + super().__init__(hs) self._server_notices = ( ConsentServerNotices(hs), ResourceLimitsServerNotices(hs), diff --git a/synapse/server_notices/worker_server_notices_sender.py b/synapse/server_notices/worker_server_notices_sender.py index 9273e6189..c76bd5746 100644 --- a/synapse/server_notices/worker_server_notices_sender.py +++ b/synapse/server_notices/worker_server_notices_sender.py @@ -12,16 +12,17 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from synapse.server import HomeServer class WorkerServerNoticesSender: """Stub impl of ServerNoticesSender which does nothing""" - def __init__(self, hs): - """ - Args: - hs (synapse.server.HomeServer): - """ + def __init__(self, hs: "HomeServer"): + pass async def on_user_syncing(self, user_id: str) -> None: """Called when the user performs a sync operation. diff --git a/synapse/storage/databases/main/deviceinbox.py b/synapse/storage/databases/main/deviceinbox.py index 45ca6620a..691080ce7 100644 --- a/synapse/storage/databases/main/deviceinbox.py +++ b/synapse/storage/databases/main/deviceinbox.py @@ -14,7 +14,7 @@ # limitations under the License. import logging -from typing import List, Tuple +from typing import List, Optional, Tuple from synapse.logging.opentracing import log_kv, set_tag, trace from synapse.replication.tcp.streams import ToDeviceStream @@ -115,7 +115,7 @@ class DeviceInboxWorkerStore(SQLBaseStore): async def get_new_messages_for_device( self, user_id: str, - device_id: str, + device_id: Optional[str], last_stream_id: int, current_stream_id: int, limit: int = 100, @@ -163,7 +163,7 @@ class DeviceInboxWorkerStore(SQLBaseStore): @trace async def delete_messages_for_device( - self, user_id: str, device_id: str, up_to_stream_id: int + self, user_id: str, device_id: Optional[str], up_to_stream_id: int ) -> int: """ Args: diff --git a/synapse/storage/databases/main/monthly_active_users.py b/synapse/storage/databases/main/monthly_active_users.py index d788dc0fc..757da3d55 100644 --- a/synapse/storage/databases/main/monthly_active_users.py +++ b/synapse/storage/databases/main/monthly_active_users.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
import logging -from typing import Dict, List +from typing import Dict, List, Optional from synapse.metrics.background_process_metrics import wrap_as_background_process from synapse.storage._base import SQLBaseStore @@ -109,7 +109,7 @@ class MonthlyActiveUsersWorkerStore(SQLBaseStore): return users @cached(num_args=1) - async def user_last_seen_monthly_active(self, user_id: str) -> int: + async def user_last_seen_monthly_active(self, user_id: str) -> Optional[int]: """ Checks if a given user is part of the monthly active user group From af387cf52ac4641e7d5ade75d3483b0c41d05ebf Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Wed, 24 Mar 2021 06:49:01 -0400 Subject: [PATCH 29/52] Add type hints to misc. files. (#9676) --- changelog.d/9676.misc | 1 + mypy.ini | 5 +- synapse/events/third_party_rules.py | 15 +++--- synapse/secrets.py | 8 +-- synapse/storage/state.py | 4 +- synapse/visibility.py | 78 ++++++++++++++--------------- 6 files changed, 57 insertions(+), 54 deletions(-) create mode 100644 changelog.d/9676.misc diff --git a/changelog.d/9676.misc b/changelog.d/9676.misc new file mode 100644 index 000000000..829e38b93 --- /dev/null +++ b/changelog.d/9676.misc @@ -0,0 +1 @@ +Add type hints to third party event rules and visibility modules. diff --git a/mypy.ini b/mypy.ini index e0685e097..709a8d07a 100644 --- a/mypy.ini +++ b/mypy.ini @@ -20,8 +20,9 @@ files = synapse/crypto, synapse/event_auth.py, synapse/events/builder.py, - synapse/events/validator.py, synapse/events/spamcheck.py, + synapse/events/third_party_rules.py, + synapse/events/validator.py, synapse/federation, synapse/groups, synapse/handlers, @@ -38,6 +39,7 @@ files = synapse/push, synapse/replication, synapse/rest, + synapse/secrets.py, synapse/server.py, synapse/server_notices, synapse/spam_checker_api, @@ -71,6 +73,7 @@ files = synapse/util/metrics.py, synapse/util/macaroons.py, synapse/util/stringutils.py, + synapse/visibility.py, tests/replication, tests/test_utils, tests/handlers/test_password_providers.py, diff --git a/synapse/events/third_party_rules.py b/synapse/events/third_party_rules.py index 02bce8b5c..9767d2394 100644 --- a/synapse/events/third_party_rules.py +++ b/synapse/events/third_party_rules.py @@ -13,12 +13,15 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Callable, Union +from typing import TYPE_CHECKING, Union from synapse.events import EventBase from synapse.events.snapshot import EventContext from synapse.types import Requester, StateMap +if TYPE_CHECKING: + from synapse.server import HomeServer + class ThirdPartyEventRules: """Allows server admins to provide a Python module implementing an extra @@ -28,7 +31,7 @@ class ThirdPartyEventRules: behaviours. 
""" - def __init__(self, hs): + def __init__(self, hs: "HomeServer"): self.third_party_rules = None self.store = hs.get_datastore() @@ -95,10 +98,9 @@ class ThirdPartyEventRules: if self.third_party_rules is None: return True - ret = await self.third_party_rules.on_create_room( + return await self.third_party_rules.on_create_room( requester, config, is_requester_admin ) - return ret async def check_threepid_can_be_invited( self, medium: str, address: str, room_id: str @@ -119,10 +121,9 @@ class ThirdPartyEventRules: state_events = await self._get_state_map_for_room(room_id) - ret = await self.third_party_rules.check_threepid_can_be_invited( + return await self.third_party_rules.check_threepid_can_be_invited( medium, address, state_events ) - return ret async def check_visibility_can_be_modified( self, room_id: str, new_visibility: str @@ -143,7 +144,7 @@ class ThirdPartyEventRules: check_func = getattr( self.third_party_rules, "check_visibility_can_be_modified", None ) - if not check_func or not isinstance(check_func, Callable): + if not check_func or not callable(check_func): return True state_events = await self._get_state_map_for_room(room_id) diff --git a/synapse/secrets.py b/synapse/secrets.py index fb6d90a3b..7939db75e 100644 --- a/synapse/secrets.py +++ b/synapse/secrets.py @@ -26,10 +26,10 @@ if sys.version_info[0:2] >= (3, 6): import secrets class Secrets: - def token_bytes(self, nbytes=32): + def token_bytes(self, nbytes: int = 32) -> bytes: return secrets.token_bytes(nbytes) - def token_hex(self, nbytes=32): + def token_hex(self, nbytes: int = 32) -> str: return secrets.token_hex(nbytes) @@ -38,8 +38,8 @@ else: import os class Secrets: - def token_bytes(self, nbytes=32): + def token_bytes(self, nbytes: int = 32) -> bytes: return os.urandom(nbytes) - def token_hex(self, nbytes=32): + def token_hex(self, nbytes: int = 32) -> str: return binascii.hexlify(self.token_bytes(nbytes)).decode("ascii") diff --git a/synapse/storage/state.py b/synapse/storage/state.py index aa25bd835..2e277a21c 100644 --- a/synapse/storage/state.py +++ b/synapse/storage/state.py @@ -449,7 +449,7 @@ class StateGroupStorage: return self.stores.state._get_state_groups_from_groups(groups, state_filter) async def get_state_for_events( - self, event_ids: List[str], state_filter: StateFilter = StateFilter.all() + self, event_ids: Iterable[str], state_filter: StateFilter = StateFilter.all() ) -> Dict[str, StateMap[EventBase]]: """Given a list of event_ids and type tuples, return a list of state dicts for each event. @@ -485,7 +485,7 @@ class StateGroupStorage: return {event: event_to_state[event] for event in event_ids} async def get_state_ids_for_events( - self, event_ids: List[str], state_filter: StateFilter = StateFilter.all() + self, event_ids: Iterable[str], state_filter: StateFilter = StateFilter.all() ) -> Dict[str, StateMap[str]]: """ Get the state dicts corresponding to a list of events, containing the event_ids diff --git a/synapse/visibility.py b/synapse/visibility.py index e39d02602..ff53a49b3 100644 --- a/synapse/visibility.py +++ b/synapse/visibility.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
import logging -import operator +from typing import Dict, FrozenSet, List, Optional from synapse.api.constants import ( AccountDataTypes, @@ -21,10 +21,11 @@ from synapse.api.constants import ( HistoryVisibility, Membership, ) +from synapse.events import EventBase from synapse.events.utils import prune_event from synapse.storage import Storage from synapse.storage.state import StateFilter -from synapse.types import get_domain_from_id +from synapse.types import StateMap, get_domain_from_id logger = logging.getLogger(__name__) @@ -48,32 +49,32 @@ MEMBERSHIP_PRIORITY = ( async def filter_events_for_client( storage: Storage, - user_id, - events, - is_peeking=False, - always_include_ids=frozenset(), - filter_send_to_client=True, -): + user_id: str, + events: List[EventBase], + is_peeking: bool = False, + always_include_ids: FrozenSet[str] = frozenset(), + filter_send_to_client: bool = True, +) -> List[EventBase]: """ Check which events a user is allowed to see. If the user can see the event but its sender asked for their data to be erased, prune the content of the event. Args: storage - user_id(str): user id to be checked - events(list[synapse.events.EventBase]): sequence of events to be checked - is_peeking(bool): should be True if: + user_id: user id to be checked + events: sequence of events to be checked + is_peeking: should be True if: * the user is not currently a member of the room, and: * the user has not been a member of the room since the given events - always_include_ids (set(event_id)): set of event ids to specifically + always_include_ids: set of event ids to specifically include (unless sender is ignored) - filter_send_to_client (bool): Whether we're checking an event that's going to be + filter_send_to_client: Whether we're checking an event that's going to be sent to a client. This might not always be the case since this function can also be called to check whether a user can see the state at a given point. Returns: - list[synapse.events.EventBase] + The filtered events. """ # Filter out events that have been soft failed so that we don't relay them # to clients. @@ -90,7 +91,7 @@ async def filter_events_for_client( AccountDataTypes.IGNORED_USER_LIST, user_id ) - ignore_list = frozenset() + ignore_list = frozenset() # type: FrozenSet[str] if ignore_dict_content: ignored_users_dict = ignore_dict_content.get("ignored_users", {}) if isinstance(ignored_users_dict, dict): @@ -107,19 +108,18 @@ async def filter_events_for_client( room_id ] = await storage.main.get_retention_policy_for_room(room_id) - def allowed(event): + def allowed(event: EventBase) -> Optional[EventBase]: """ Args: - event (synapse.events.EventBase): event to check + event: event to check Returns: - None|EventBase: - None if the user cannot see this event at all + None if the user cannot see this event at all - a redacted copy of the event if they can only see a redacted - version + a redacted copy of the event if they can only see a redacted + version - the original event if they can see it as normal. + the original event if they can see it as normal. """ # Only run some checks if these events aren't about to be sent to clients. This is # because, if this is not the case, we're probably only checking if the users can @@ -252,48 +252,46 @@ async def filter_events_for_client( return event - # check each event: gives an iterable[None|EventBase] + # Check each event: gives an iterable of None or (a potentially modified) + # EventBase. 
filtered_events = map(allowed, events) - # remove the None entries - filtered_events = filter(operator.truth, filtered_events) - - # we turn it into a list before returning it. - return list(filtered_events) + # Turn it into a list and remove None entries before returning. + return [ev for ev in filtered_events if ev] async def filter_events_for_server( storage: Storage, - server_name, - events, - redact=True, - check_history_visibility_only=False, -): + server_name: str, + events: List[EventBase], + redact: bool = True, + check_history_visibility_only: bool = False, +) -> List[EventBase]: """Filter a list of events based on whether given server is allowed to see them. Args: storage - server_name (str) - events (iterable[FrozenEvent]) - redact (bool): Whether to return a redacted version of the event, or + server_name + events + redact: Whether to return a redacted version of the event, or to filter them out entirely. - check_history_visibility_only (bool): Whether to only check the + check_history_visibility_only: Whether to only check the history visibility, rather than things like if the sender has been erased. This is used e.g. during pagination to decide whether to backfill or not. Returns - list[FrozenEvent] + The filtered events. """ - def is_sender_erased(event, erased_senders): + def is_sender_erased(event: EventBase, erased_senders: Dict[str, bool]) -> bool: if erased_senders and erased_senders[event.sender]: logger.info("Sender of %s has been erased, redacting", event.event_id) return True return False - def check_event_is_visible(event, state): + def check_event_is_visible(event: EventBase, state: StateMap[EventBase]) -> bool: history = state.get((EventTypes.RoomHistoryVisibility, ""), None) if history: visibility = history.content.get( From 83de0be4b0398e94e67c7110d9455db86592ead0 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Wed, 24 Mar 2021 07:35:43 -0400 Subject: [PATCH 30/52] Bump mypy-zope to 0.2.13. (#9678) This fixes an error ("Cannot determine consistent method resolution order (MRO)") when running mypy with a cache. --- changelog.d/9678.misc | 1 + setup.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/9678.misc diff --git a/changelog.d/9678.misc b/changelog.d/9678.misc new file mode 100644 index 000000000..77a2b2d43 --- /dev/null +++ b/changelog.d/9678.misc @@ -0,0 +1 @@ +Bump mypy-zope to 0.2.13 to fix "Cannot determine consistent method resolution order (MRO)" errors when running mypy a second time. diff --git a/setup.py b/setup.py index b834e4e55..1939a7b86 100755 --- a/setup.py +++ b/setup.py @@ -103,7 +103,7 @@ CONDITIONAL_REQUIREMENTS["lint"] = [ "flake8", ] -CONDITIONAL_REQUIREMENTS["mypy"] = ["mypy==0.812", "mypy-zope==0.2.11"] +CONDITIONAL_REQUIREMENTS["mypy"] = ["mypy==0.812", "mypy-zope==0.2.13"] # Dependencies which are exclusively required by unit test code. This is # NOT a list of all modules that are necessary to run the unit tests. 
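Returning to the `filter_events_for_client` rewrite above: its core shape is a checker that returns `None` (hide the event), a pruned copy (redact it), or the original event, with the falsy results dropped at the end. A toy model of that shape, with plain dicts standing in for `EventBase`:

```python
from typing import Optional

def allowed(event: dict) -> Optional[dict]:
    if event.get("soft_failed"):
        return None                      # not visible at all
    if event.get("sender_erased"):
        return {**event, "content": {}}  # a "redacted" copy
    return event                         # visible as normal

events = [
    {"event_id": "$a", "content": {"body": "hi"}},
    {"event_id": "$b", "content": {}, "soft_failed": True},
    {"event_id": "$c", "content": {"body": "bye"}, "sender_erased": True},
]
# Map through the checker, then drop the None entries.
filtered = [ev for ev in map(allowed, events) if ev]
assert [ev["event_id"] for ev in filtered] == ["$a", "$c"]
```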
From 4655d2221e79b0ecdcb88c7e1eef467eec2ed481 Mon Sep 17 00:00:00 2001 From: Ben Banfield-Zanin Date: Wed, 24 Mar 2021 11:43:04 +0000 Subject: [PATCH 31/52] docs: fallback/web endpoint does not appear to be mounted on workers (#9679) --- changelog.d/9679.doc | 1 + docs/workers.md | 3 +-- 2 files changed, 2 insertions(+), 2 deletions(-) create mode 100644 changelog.d/9679.doc diff --git a/changelog.d/9679.doc b/changelog.d/9679.doc new file mode 100644 index 000000000..34f87490d --- /dev/null +++ b/changelog.d/9679.doc @@ -0,0 +1 @@ +Improve worker documentation for fallback/web auth endpoints. diff --git a/docs/workers.md b/docs/workers.md index e7bf9b8ce..c6282165b 100644 --- a/docs/workers.md +++ b/docs/workers.md @@ -232,7 +232,6 @@ expressions: # Registration/login requests ^/_matrix/client/(api/v1|r0|unstable)/login$ ^/_matrix/client/(r0|unstable)/register$ - ^/_matrix/client/(r0|unstable)/auth/.*/fallback/web$ # Event sending requests ^/_matrix/client/(api/v1|r0|unstable)/rooms/.*/redact @@ -276,7 +275,7 @@ using): Ensure that all SSO logins go to a single process. For multiple workers not handling the SSO endpoints properly, see -[#7530](https://github.com/matrix-org/synapse/issues/7530) and +[#7530](https://github.com/matrix-org/synapse/issues/7530) and [#9427](https://github.com/matrix-org/synapse/issues/9427). Note that a HTTP listener with `client` and `federation` resources must be From c73cc2c2ad7244a0080f35d9710cedfe11917e69 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Wed, 24 Mar 2021 12:45:39 +0000 Subject: [PATCH 32/52] Spaces summary: call out to other servers (#9653) When we hit an unknown room in the space tree, see if there are other servers that we might be able to poll to get the data. Fixes: #9447 --- changelog.d/9653.feature | 1 + synapse/federation/federation_client.py | 180 ++++++++++++++++++++++-- synapse/federation/transport/client.py | 35 ++++- synapse/handlers/space_summary.py | 135 +++++++++++++++--- 4 files changed, 324 insertions(+), 27 deletions(-) create mode 100644 changelog.d/9653.feature diff --git a/changelog.d/9653.feature b/changelog.d/9653.feature new file mode 100644 index 000000000..2f7ccedcf --- /dev/null +++ b/changelog.d/9653.feature @@ -0,0 +1 @@ +Add initial experimental support for a "space summary" API. diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index 3b2f51baa..afdb5bf2f 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -27,11 +27,13 @@ from typing import ( List, Mapping, Optional, + Sequence, Tuple, TypeVar, Union, ) +import attr from prometheus_client import Counter from twisted.internet import defer @@ -455,6 +457,7 @@ class FederationClient(FederationBase): description: str, destinations: Iterable[str], callback: Callable[[str], Awaitable[T]], + failover_on_unknown_endpoint: bool = False, ) -> T: """Try an operation on a series of servers, until it succeeds @@ -474,6 +477,10 @@ class FederationClient(FederationBase): next server tried. Normally the stacktrace is logged but this is suppressed if the exception is an InvalidResponseError. + failover_on_unknown_endpoint: if True, we will try other servers if it looks + like a server doesn't support the endpoint. This is typically useful + if the endpoint in question is new or experimental. 
+ Returns: The result of callback, if it succeeds @@ -493,16 +500,31 @@ class FederationClient(FederationBase): except UnsupportedRoomVersionError: raise except HttpResponseException as e: - if not 500 <= e.code < 600: - raise e.to_synapse_error() - else: - logger.warning( - "Failed to %s via %s: %i %s", - description, - destination, - e.code, - e.args[0], - ) + synapse_error = e.to_synapse_error() + failover = False + + if 500 <= e.code < 600: + failover = True + + elif failover_on_unknown_endpoint: + # there is no good way to detect an "unknown" endpoint. Dendrite + # returns a 404 (with no body); synapse returns a 400 + # with M_UNRECOGNISED. + if e.code == 404 or ( + e.code == 400 and synapse_error.errcode == Codes.UNRECOGNIZED + ): + failover = True + + if not failover: + raise synapse_error from e + + logger.warning( + "Failed to %s via %s: %i %s", + description, + destination, + e.code, + e.args[0], + ) except Exception: logger.warning( "Failed to %s via %s", description, destination, exc_info=True @@ -1042,3 +1064,141 @@ class FederationClient(FederationBase): # If we don't manage to find it, return None. It's not an error if a # server doesn't give it to us. return None + + async def get_space_summary( + self, + destinations: Iterable[str], + room_id: str, + suggested_only: bool, + max_rooms_per_space: Optional[int], + exclude_rooms: List[str], + ) -> "FederationSpaceSummaryResult": + """ + Call other servers to get a summary of the given space + + + Args: + destinations: The remote servers. We will try them in turn, omitting any + that have been blacklisted. + + room_id: ID of the space to be queried + + suggested_only: If true, ask the remote server to only return children + with the "suggested" flag set + + max_rooms_per_space: A limit on the number of children to return for each + space + + exclude_rooms: A list of room IDs to tell the remote server to skip + + Returns: + a parsed FederationSpaceSummaryResult + + Raises: + SynapseError if we were unable to get a valid summary from any of the + remote servers + """ + + async def send_request(destination: str) -> FederationSpaceSummaryResult: + res = await self.transport_layer.get_space_summary( + destination=destination, + room_id=room_id, + suggested_only=suggested_only, + max_rooms_per_space=max_rooms_per_space, + exclude_rooms=exclude_rooms, + ) + + try: + return FederationSpaceSummaryResult.from_json_dict(res) + except ValueError as e: + raise InvalidResponseError(str(e)) + + return await self._try_destination_list( + "fetch space summary", + destinations, + send_request, + failover_on_unknown_endpoint=True, + ) + + +@attr.s(frozen=True, slots=True) +class FederationSpaceSummaryEventResult: + """Represents a single event in the result of a successful get_space_summary call. + + It's essentially just a serialised event object, but we do a bit of parsing and + validation in `from_json_dict` and store some of the validated properties in + object attributes. 
+ """ + + event_type = attr.ib(type=str) + state_key = attr.ib(type=str) + via = attr.ib(type=Sequence[str]) + + # the raw data, including the above keys + data = attr.ib(type=JsonDict) + + @classmethod + def from_json_dict(cls, d: JsonDict) -> "FederationSpaceSummaryEventResult": + """Parse an event within the result of a /spaces/ request + + Args: + d: json object to be parsed + + Raises: + ValueError if d is not a valid event + """ + + event_type = d.get("type") + if not isinstance(event_type, str): + raise ValueError("Invalid event: 'event_type' must be a str") + + state_key = d.get("state_key") + if not isinstance(state_key, str): + raise ValueError("Invalid event: 'state_key' must be a str") + + content = d.get("content") + if not isinstance(content, dict): + raise ValueError("Invalid event: 'content' must be a dict") + + via = content.get("via") + if not isinstance(via, Sequence): + raise ValueError("Invalid event: 'via' must be a list") + if any(not isinstance(v, str) for v in via): + raise ValueError("Invalid event: 'via' must be a list of strings") + + return cls(event_type, state_key, via, d) + + +@attr.s(frozen=True, slots=True) +class FederationSpaceSummaryResult: + """Represents the data returned by a successful get_space_summary call.""" + + rooms = attr.ib(type=Sequence[JsonDict]) + events = attr.ib(type=Sequence[FederationSpaceSummaryEventResult]) + + @classmethod + def from_json_dict(cls, d: JsonDict) -> "FederationSpaceSummaryResult": + """Parse the result of a /spaces/ request + + Args: + d: json object to be parsed + + Raises: + ValueError if d is not a valid /spaces/ response + """ + rooms = d.get("rooms") + if not isinstance(rooms, Sequence): + raise ValueError("'rooms' must be a list") + if any(not isinstance(r, dict) for r in rooms): + raise ValueError("Invalid room in 'rooms' list") + + events = d.get("events") + if not isinstance(events, Sequence): + raise ValueError("'events' must be a list") + if any(not isinstance(e, dict) for e in events): + raise ValueError("Invalid event in 'events' list") + parsed_events = [ + FederationSpaceSummaryEventResult.from_json_dict(e) for e in events + ] + + return cls(rooms, parsed_events) diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py index 10c4747f9..6aee47c43 100644 --- a/synapse/federation/transport/client.py +++ b/synapse/federation/transport/client.py @@ -16,7 +16,7 @@ import logging import urllib -from typing import Any, Dict, Optional +from typing import Any, Dict, List, Optional from synapse.api.constants import Membership from synapse.api.errors import Codes, HttpResponseException, SynapseError @@ -26,6 +26,7 @@ from synapse.api.urls import ( FEDERATION_V2_PREFIX, ) from synapse.logging.utils import log_function +from synapse.types import JsonDict logger = logging.getLogger(__name__) @@ -978,6 +979,38 @@ class TransportLayerClient: return self.client.get_json(destination=destination, path=path) + async def get_space_summary( + self, + destination: str, + room_id: str, + suggested_only: bool, + max_rooms_per_space: Optional[int], + exclude_rooms: List[str], + ) -> JsonDict: + """ + Args: + destination: The remote server + room_id: The room ID to ask about. 
+ suggested_only: if True, only suggested rooms will be returned + max_rooms_per_space: an optional limit to the number of children to be + returned per space + exclude_rooms: a list of any rooms we can skip + """ + path = _create_path( + FEDERATION_UNSTABLE_PREFIX, "/org.matrix.msc2946/spaces/%s", room_id + ) + + params = { + "suggested_only": suggested_only, + "exclude_rooms": exclude_rooms, + } + if max_rooms_per_space is not None: + params["max_rooms_per_space"] = max_rooms_per_space + + return await self.client.post_json( + destination=destination, path=path, data=params + ) + def _create_path(federation_prefix, path, *args): """ diff --git a/synapse/handlers/space_summary.py b/synapse/handlers/space_summary.py index f5ead9447..5d9418969 100644 --- a/synapse/handlers/space_summary.py +++ b/synapse/handlers/space_summary.py @@ -16,7 +16,7 @@ import itertools import logging from collections import deque -from typing import TYPE_CHECKING, Iterable, List, Optional, Sequence, Set, Tuple +from typing import TYPE_CHECKING, Iterable, List, Optional, Sequence, Set, Tuple, cast import attr @@ -38,6 +38,9 @@ MAX_ROOMS = 50 # max number of events to return per room. MAX_ROOMS_PER_SPACE = 50 +# max number of federation servers to hit per room +MAX_SERVERS_PER_SPACE = 3 + class SpaceSummaryHandler: def __init__(self, hs: "HomeServer"): @@ -47,6 +50,8 @@ class SpaceSummaryHandler: self._state_handler = hs.get_state_handler() self._store = hs.get_datastore() self._event_serializer = hs.get_event_client_serializer() + self._server_name = hs.hostname + self._federation_client = hs.get_federation_client() async def get_space_summary( self, @@ -78,35 +83,81 @@ class SpaceSummaryHandler: await self._auth.check_user_in_room_or_world_readable(room_id, requester) # the queue of rooms to process - room_queue = deque((_RoomQueueEntry(room_id),)) + room_queue = deque((_RoomQueueEntry(room_id, ()),)) + # rooms we have already processed processed_rooms = set() # type: Set[str] + # events we have already processed. We don't necessarily have their event ids, + # so instead we key on (room id, state key) + processed_events = set() # type: Set[Tuple[str, str]] + rooms_result = [] # type: List[JsonDict] events_result = [] # type: List[JsonDict] while room_queue and len(rooms_result) < MAX_ROOMS: queue_entry = room_queue.popleft() room_id = queue_entry.room_id + if room_id in processed_rooms: + # already done this room + continue + logger.debug("Processing room %s", room_id) - processed_rooms.add(room_id) + + is_in_room = await self._store.is_host_joined(room_id, self._server_name) # The client-specified max_rooms_per_space limit doesn't apply to the # room_id specified in the request, so we ignore it if this is the # first room we are processing. 
max_children = max_rooms_per_space if processed_rooms else None - rooms, events = await self._summarize_local_room( - requester, room_id, suggested_only, max_children + if is_in_room: + rooms, events = await self._summarize_local_room( + requester, room_id, suggested_only, max_children + ) + else: + rooms, events = await self._summarize_remote_room( + queue_entry, + suggested_only, + max_children, + exclude_rooms=processed_rooms, + ) + + logger.debug( + "Query of %s returned rooms %s, events %s", + queue_entry.room_id, + [room.get("room_id") for room in rooms], + ["%s->%s" % (ev["room_id"], ev["state_key"]) for ev in events], ) rooms_result.extend(rooms) - events_result.extend(events) - # add any children that we haven't already processed to the queue - for edge_event in events: - if edge_event["state_key"] not in processed_rooms: - room_queue.append(_RoomQueueEntry(edge_event["state_key"])) + # any rooms returned don't need visiting again + processed_rooms.update(cast(str, room.get("room_id")) for room in rooms) + + # the room we queried may or may not have been returned, but don't process + # it again, anyway. + processed_rooms.add(room_id) + + # XXX: is it ok that we blindly iterate through any events returned by + # a remote server, whether or not they actually link to any rooms in our + # tree? + for ev in events: + # remote servers might return events we have already processed + # (eg, Dendrite returns inward pointers as well as outward ones), so + # we need to filter them out, to avoid returning duplicate links to the + # client. + ev_key = (ev["room_id"], ev["state_key"]) + if ev_key in processed_events: + continue + events_result.append(ev) + + # add the child to the queue. we have already validated + # that the vias are a list of server names. 
+                room_queue.append(
+                    _RoomQueueEntry(ev["state_key"], ev["content"]["via"])
+                )
+            processed_events.add(ev_key)
 
         return {"rooms": rooms_result, "events": events_result}
 
@@ -149,20 +200,23 @@ class SpaceSummaryHandler:
 
         while room_queue and len(rooms_result) < MAX_ROOMS:
             room_id = room_queue.popleft()
+            if room_id in processed_rooms:
+                # already done this room
+                continue
+
             logger.debug("Processing room %s", room_id)
-            processed_rooms.add(room_id)
 
             rooms, events = await self._summarize_local_room(
                 None, room_id, suggested_only, max_rooms_per_space
             )
 
+            processed_rooms.add(room_id)
+
             rooms_result.extend(rooms)
             events_result.extend(events)
 
-            # add any children that we haven't already processed to the queue
-            for edge_event in events:
-                if edge_event["state_key"] not in processed_rooms:
-                    room_queue.append(edge_event["state_key"])
+            # add any children to the queue
+            room_queue.extend(edge_event["state_key"] for edge_event in events)
 
         return {"rooms": rooms_result, "events": events_result}
 
@@ -200,6 +254,43 @@ class SpaceSummaryHandler:
             )
         return (room_entry,), events_result
 
+    async def _summarize_remote_room(
+        self,
+        room: "_RoomQueueEntry",
+        suggested_only: bool,
+        max_children: Optional[int],
+        exclude_rooms: Iterable[str],
+    ) -> Tuple[Sequence[JsonDict], Sequence[JsonDict]]:
+        room_id = room.room_id
+        logger.info("Requesting summary for %s via %s", room_id, room.via)
+
+        # we need to make the exclusion list json-serialisable
+        exclude_rooms = list(exclude_rooms)
+
+        via = itertools.islice(room.via, MAX_SERVERS_PER_SPACE)
+        try:
+            res = await self._federation_client.get_space_summary(
+                via,
+                room_id,
+                suggested_only=suggested_only,
+                max_rooms_per_space=max_children,
+                exclude_rooms=exclude_rooms,
+            )
+        except Exception as e:
+            logger.warning(
+                "Unable to get summary of %s via federation: %s",
+                room_id,
+                e,
+                exc_info=logger.isEnabledFor(logging.DEBUG),
+            )
+            return (), ()
+
+        return res.rooms, tuple(
+            ev.data
+            for ev in res.events
+            if ev.event_type == EventTypes.MSC1772_SPACE_CHILD
+        )
+
     async def _is_room_accessible(self, room_id: str, requester: Optional[str]) -> bool:
         # if we have an authenticated requesting user, first check if they are in the
         # room
@@ -276,12 +367,24 @@ class SpaceSummaryHandler:
         )
 
         # filter out any events without a "via" (which implies it has been redacted)
-        return (e for e in events if e.content.get("via"))
+        return (e for e in events if _has_valid_via(e))
 
 
 @attr.s(frozen=True, slots=True)
 class _RoomQueueEntry:
     room_id = attr.ib(type=str)
+    via = attr.ib(type=Sequence[str])
+
+
+def _has_valid_via(e: EventBase) -> bool:
+    via = e.content.get("via")
+    if not via or not isinstance(via, Sequence):
+        return False
+    for v in via:
+        if not isinstance(v, str):
+            logger.debug("Ignoring edge event %s with invalid via entry", e.event_id)
+            return False
+    return True
 
 
 def _is_suggested_child_event(edge_event: EventBase) -> bool:

From 4c3827f2c18180533b804ebd4d8f5cb90774244b Mon Sep 17 00:00:00 2001
From: Jonathan de Jong
Date: Wed, 24 Mar 2021 14:34:30 +0100
Subject: [PATCH 33/52] Enable additional flake8-bugbear linting checks.
(#9659) --- changelog.d/9659.misc | 1 + setup.cfg | 4 ++-- synapse/crypto/context_factory.py | 2 +- synapse/logging/context.py | 2 +- synapse/storage/database.py | 2 +- synapse/util/async_helpers.py | 2 +- synapse/util/caches/__init__.py | 2 +- tests/server.py | 2 +- 8 files changed, 9 insertions(+), 8 deletions(-) create mode 100644 changelog.d/9659.misc diff --git a/changelog.d/9659.misc b/changelog.d/9659.misc new file mode 100644 index 000000000..6602c1cc6 --- /dev/null +++ b/changelog.d/9659.misc @@ -0,0 +1 @@ +Introduce bugbear to the test suite and fix some of it's lint violations. diff --git a/setup.cfg b/setup.cfg index 920868df2..7329eed21 100644 --- a/setup.cfg +++ b/setup.cfg @@ -18,8 +18,8 @@ ignore = # E203: whitespace before ':' (which is contrary to pep8?) # E731: do not assign a lambda expression, use a def # E501: Line too long (black enforces this for us) -# B00: Subsection of the bugbear suite (TODO: add in remaining fixes) -ignore=W503,W504,E203,E731,E501,B00 +# B00*: Subsection of the bugbear suite (TODO: add in remaining fixes) +ignore=W503,W504,E203,E731,E501,B006,B007,B008 [isort] line_length = 88 diff --git a/synapse/crypto/context_factory.py b/synapse/crypto/context_factory.py index 4ca13011e..c644b4dfc 100644 --- a/synapse/crypto/context_factory.py +++ b/synapse/crypto/context_factory.py @@ -191,7 +191,7 @@ def _context_info_cb(ssl_connection, where, ret): # ... we further assume that SSLClientConnectionCreator has set the # '_synapse_tls_verifier' attribute to a ConnectionVerifier object. tls_protocol._synapse_tls_verifier.verify_context_info_cb(ssl_connection, where) - except: # noqa: E722, taken from the twisted implementation + except BaseException: # taken from the twisted implementation logger.exception("Error during info_callback") f = Failure() tls_protocol.failVerification(f) diff --git a/synapse/logging/context.py b/synapse/logging/context.py index 1a7ea4fa9..03cf3c2b8 100644 --- a/synapse/logging/context.py +++ b/synapse/logging/context.py @@ -689,7 +689,7 @@ def run_in_background(f, *args, **kwargs) -> defer.Deferred: current = current_context() try: res = f(*args, **kwargs) - except: # noqa: E722 + except Exception: # the assumption here is that the caller doesn't want to be disturbed # by synchronous exceptions, so let's turn them into Failures. return defer.fail() diff --git a/synapse/storage/database.py b/synapse/storage/database.py index f1ba529a2..5b0b9a20b 100644 --- a/synapse/storage/database.py +++ b/synapse/storage/database.py @@ -670,7 +670,7 @@ class DatabasePool: for after_callback, after_args, after_kwargs in after_callbacks: after_callback(*after_args, **after_kwargs) - except: # noqa: E722, as we reraise the exception this is fine. 
+        except Exception:
             for after_callback, after_args, after_kwargs in exception_callbacks:
                 after_callback(*after_args, **after_kwargs)
             raise
diff --git a/synapse/util/async_helpers.py b/synapse/util/async_helpers.py
index f33c11584..c3b2d981e 100644
--- a/synapse/util/async_helpers.py
+++ b/synapse/util/async_helpers.py
@@ -496,7 +496,7 @@ def timeout_deferred(
 
         try:
             deferred.cancel()
-        except: # noqa: E722, if we throw any exception it'll break time outs
+        except Exception:  # if we throw any exception it'll break time outs
             logger.exception("Canceller failed during timeout")
 
         # the cancel() call should have set off a chain of errbacks which
diff --git a/synapse/util/caches/__init__.py b/synapse/util/caches/__init__.py
index e676c2cac..f96870633 100644
--- a/synapse/util/caches/__init__.py
+++ b/synapse/util/caches/__init__.py
@@ -116,7 +116,7 @@ def register_cache(
     """
     if resizable:
         if not resize_callback:
-            resize_callback = getattr(cache, "set_cache_factor")
+            resize_callback = cache.set_cache_factor  # type: ignore
         add_resizable_cache(cache_name, resize_callback)
 
     metric = CacheMetric(cache, cache_type, cache_name, collect_callback)
diff --git a/tests/server.py b/tests/server.py
index 2287d2007..57cc4ac60 100644
--- a/tests/server.py
+++ b/tests/server.py
@@ -593,7 +593,7 @@ class FakeTransport:
         if self.disconnected:
             return
 
-        if getattr(self.other, "transport") is None:
+        if not hasattr(self.other, "transport"):
             # the other has no transport yet; reschedule
             if self.autoflush:
                 self._reactor.callLater(0.0, self.flush)

From c2d4bd62a26ea0f19445aa23367fa7ee1b4e5b08 Mon Sep 17 00:00:00 2001
From: Patrick Cloke
Date: Wed, 24 Mar 2021 11:32:42 -0400
Subject: [PATCH 34/52] Fix typo in changelog.

---
 changelog.d/9499.misc | 2 +-
 changelog.d/9659.misc | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/changelog.d/9499.misc b/changelog.d/9499.misc
index 1513017a1..428a466fa 100644
--- a/changelog.d/9499.misc
+++ b/changelog.d/9499.misc
@@ -1 +1 @@
-Introduce bugbear to the test suite and fix some of it's lint violations.
\ No newline at end of file
+Introduce flake8-bugbear to the test suite and fix some of its lint violations.
diff --git a/changelog.d/9659.misc b/changelog.d/9659.misc
index 6602c1cc6..428a466fa 100644
--- a/changelog.d/9659.misc
+++ b/changelog.d/9659.misc
@@ -1 +1 @@
-Introduce bugbear to the test suite and fix some of it's lint violations.
+Introduce flake8-bugbear to the test suite and fix some of its lint violations.

From c602ba833627139103830f76803fc37b4adce37a Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Wed, 24 Mar 2021 16:12:47 +0000
Subject: [PATCH 35/52] Fixed undefined variable error in catchup (#9664)

Broke in #9640

Co-authored-by: Patrick Cloke
---
 changelog.d/9664.misc                              | 1 +
 synapse/federation/sender/per_destination_queue.py | 2 ++
 2 files changed, 3 insertions(+)
 create mode 100644 changelog.d/9664.misc

diff --git a/changelog.d/9664.misc b/changelog.d/9664.misc
new file mode 100644
index 000000000..3d410ed4c
--- /dev/null
+++ b/changelog.d/9664.misc
@@ -0,0 +1 @@
+Improve performance of federation catch up by sending the latest events in the room to the remote, rather than just the last event sent by the local server.
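The bug here was a conditional-assignment slip: `room_catchup_pdus` was only bound inside the `if new_pdus:` branch, so the code after it would hit an undefined variable whenever no extra events were found; the two added lines in the diff below restore the fallback. Distilled into a runnable sketch (with hypothetical stand-ins for the real variables):

```python
def pick_catchup_pdus(new_pdus: list, pdu: object) -> list:
    if new_pdus:
        room_catchup_pdus = new_pdus
    else:
        room_catchup_pdus = [pdu]  # the fallback the fix adds back
    # Without the else-branch, this reference raises UnboundLocalError
    # whenever new_pdus is empty.
    return room_catchup_pdus

assert pick_catchup_pdus([], "last-sent-pdu") == ["last-sent-pdu"]
```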
diff --git a/synapse/federation/sender/per_destination_queue.py b/synapse/federation/sender/per_destination_queue.py index af85fe0a1..89df9a619 100644 --- a/synapse/federation/sender/per_destination_queue.py +++ b/synapse/federation/sender/per_destination_queue.py @@ -480,6 +480,8 @@ class PerDestinationQueue: # the other sending servers are up). if new_pdus: room_catchup_pdus = new_pdus + else: + room_catchup_pdus = [pdu] logger.info( "Catching up rooms to %s: %r", self._destination, pdu.room_id From fae81f2f680242bcabbff12fa9c1eb72ba017b6f Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Thu, 25 Mar 2021 10:34:23 +0000 Subject: [PATCH 36/52] Add a storage method for returning all current presence from all users (#9650) Split off from https://github.com/matrix-org/synapse/pull/9491 Adds a storage method for getting the current presence of all local users, optionally excluding those that are offline. This will be used by the code in #9491 when a PresenceRouter module informs Synapse that a given user should have `"ALL"` user presence updates routed to them. Specifically, it is used here: https://github.com/matrix-org/synapse/blob/b588f16e391d664b11f43257eabf70663f0c6d59/synapse/handlers/presence.py#L1131-L1133 Note that there is a `get_all_presence_updates` function just above. That function is intended to walk up the table through stream IDs, and is primarily used by the presence replication stream. I could possibly make use of it in the PresenceRouter-related code, but it would be a bit of a bodge. --- changelog.d/9650.misc | 1 + synapse/storage/database.py | 11 +++- synapse/storage/databases/main/presence.py | 60 +++++++++++++++++++++- 3 files changed, 69 insertions(+), 3 deletions(-) create mode 100644 changelog.d/9650.misc diff --git a/changelog.d/9650.misc b/changelog.d/9650.misc new file mode 100644 index 000000000..d830ead70 --- /dev/null +++ b/changelog.d/9650.misc @@ -0,0 +1 @@ +Add a storage method for pulling all current user presence state from the database. \ No newline at end of file diff --git a/synapse/storage/database.py b/synapse/storage/database.py index 5b0b9a20b..94590e7b4 100644 --- a/synapse/storage/database.py +++ b/synapse/storage/database.py @@ -1906,6 +1906,7 @@ class DatabasePool: retcols: Iterable[str], filters: Optional[Dict[str, Any]] = None, keyvalues: Optional[Dict[str, Any]] = None, + exclude_keyvalues: Optional[Dict[str, Any]] = None, order_direction: str = "ASC", ) -> List[Dict[str, Any]]: """ @@ -1929,7 +1930,10 @@ class DatabasePool: apply a WHERE ? LIKE ? clause. keyvalues: column names and values to select the rows with, or None to not - apply a WHERE clause. + apply a WHERE key = value clause. + exclude_keyvalues: + column names and values to exclude rows with, or None to not + apply a WHERE key != value clause. order_direction: Whether the results should be ordered "ASC" or "DESC". Returns: @@ -1938,7 +1942,7 @@ class DatabasePool: if order_direction not in ["ASC", "DESC"]: raise ValueError("order_direction must be one of 'ASC' or 'DESC'.") - where_clause = "WHERE " if filters or keyvalues else "" + where_clause = "WHERE " if filters or keyvalues or exclude_keyvalues else "" arg_list = [] # type: List[Any] if filters: where_clause += " AND ".join("%s LIKE ?" % (k,) for k in filters) @@ -1947,6 +1951,9 @@ class DatabasePool: if keyvalues: where_clause += " AND ".join("%s = ?" 
% (k,) for k in keyvalues) arg_list += list(keyvalues.values()) + if exclude_keyvalues: + where_clause += " AND ".join("%s != ?" % (k,) for k in exclude_keyvalues) + arg_list += list(exclude_keyvalues.values()) sql = "SELECT %s FROM %s %s ORDER BY %s %s LIMIT ? OFFSET ?" % ( ", ".join(retcols), diff --git a/synapse/storage/databases/main/presence.py b/synapse/storage/databases/main/presence.py index 29edab34d..0ff693a31 100644 --- a/synapse/storage/databases/main/presence.py +++ b/synapse/storage/databases/main/presence.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import List, Tuple +from typing import Dict, List, Tuple from synapse.api.presence import UserPresenceState from synapse.storage._base import SQLBaseStore, make_in_list_sql_clause @@ -157,5 +157,63 @@ class PresenceStore(SQLBaseStore): return {row["user_id"]: UserPresenceState(**row) for row in rows} + async def get_presence_for_all_users( + self, + include_offline: bool = True, + ) -> Dict[str, UserPresenceState]: + """Retrieve the current presence state for all users. + + Note that the presence_stream table is culled frequently, so it should only + contain the latest presence state for each user. + + Args: + include_offline: Whether to include offline presence states + + Returns: + A dict of user IDs to their current UserPresenceState. + """ + users_to_state = {} + + exclude_keyvalues = None + if not include_offline: + # Exclude offline presence state + exclude_keyvalues = {"state": "offline"} + + # This may be a very heavy database query. + # We paginate in order to not block a database connection. + limit = 100 + offset = 0 + while True: + rows = await self.db_pool.runInteraction( + "get_presence_for_all_users", + self.db_pool.simple_select_list_paginate_txn, + "presence_stream", + orderby="stream_id", + start=offset, + limit=limit, + exclude_keyvalues=exclude_keyvalues, + retcols=( + "user_id", + "state", + "last_active_ts", + "last_federation_update_ts", + "last_user_sync_ts", + "status_msg", + "currently_active", + ), + order_direction="ASC", + ) + + for row in rows: + users_to_state[row["user_id"]] = UserPresenceState(**row) + + # We've run out of updates to query + if len(rows) < limit: + break + + offset += limit + + return users_to_state + def get_current_presence_token(self): return self._presence_id_gen.get_current_token() From e5801db8305d548be2f2210366d185aef4e53127 Mon Sep 17 00:00:00 2001 From: Serban Constantin Date: Thu, 25 Mar 2021 17:31:26 +0200 Subject: [PATCH 37/52] platform specific prerequisites in source install (#9667) Make it clearer in the source install step that the platform specific prerequisites must be installed first. Signed-off-by: Serban Constantin --- INSTALL.md | 15 ++++++++------- changelog.d/9667.doc | 1 + 2 files changed, 9 insertions(+), 7 deletions(-) create mode 100644 changelog.d/9667.doc diff --git a/INSTALL.md b/INSTALL.md index b9e3f613d..59318cb08 100644 --- a/INSTALL.md +++ b/INSTALL.md @@ -6,7 +6,7 @@ There are 3 steps to follow under **Installation Instructions**. 
- [Choosing your server name](#choosing-your-server-name) - [Installing Synapse](#installing-synapse) - [Installing from source](#installing-from-source) - - [Platform-Specific Instructions](#platform-specific-instructions) + - [Platform-specific prerequisites](#platform-specific-prerequisites) - [Debian/Ubuntu/Raspbian](#debianubunturaspbian) - [ArchLinux](#archlinux) - [CentOS/Fedora](#centosfedora) @@ -60,17 +60,14 @@ that your email address is probably `user@example.com` rather than (Prebuilt packages are available for some platforms - see [Prebuilt packages](#prebuilt-packages).) +When installing from source please make sure that the [Platform-specific prerequisites](#platform-specific-prerequisites) are already installed. + System requirements: - POSIX-compliant system (tested on Linux & OS X) - Python 3.5.2 or later, up to Python 3.9. - At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org -Synapse is written in Python but some of the libraries it uses are written in -C. So before we can install Synapse itself we need a working C compiler and the -header files for Python C extensions. See [Platform-Specific -Instructions](#platform-specific-instructions) for information on installing -these on various platforms. To install the Synapse homeserver run: @@ -128,7 +125,11 @@ source env/bin/activate synctl start ``` -#### Platform-Specific Instructions +#### Platform-specific prerequisites + +Synapse is written in Python but some of the libraries it uses are written in +C. So before we can install Synapse itself we need a working C compiler and the +header files for Python C extensions. ##### Debian/Ubuntu/Raspbian diff --git a/changelog.d/9667.doc b/changelog.d/9667.doc new file mode 100644 index 000000000..dec4816b4 --- /dev/null +++ b/changelog.d/9667.doc @@ -0,0 +1 @@ +Update source install documentation to mention platform prerequisites before the source install steps. \ No newline at end of file From d4c4798a2548a53b63546a176f6dd350c4ad26bc Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Thu, 25 Mar 2021 17:53:54 +0100 Subject: [PATCH 38/52] Use interpreter from $PATH instead of absolute paths in various scripts using /usr/bin/env (#9689) On NixOS, `bash` isn't under `/bin/bash` but rather in some directory in `$PATH`. Locally, I've been patching those scripts to make them work. `/usr/bin/env` seems to be the only [portable way](https://unix.stackexchange.com/questions/29608/why-is-it-better-to-use-usr-bin-env-name-instead-of-path-to-name-as-my) to use binaries from the PATH as interpreters. 
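The difference is easy to demonstrate with a two-line script (an illustrative sketch, not part of the patch):

```sh
#!/usr/bin/env bash
# `env` searches $PATH for `bash`, so this shebang works even on systems
# such as NixOS where bash does not live at /bin/bash.
echo "This script is running under: ${BASH}"
```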
Signed-off-by: Quentin Gliech --- .buildkite/scripts/test_old_deps.sh | 2 +- .buildkite/scripts/test_synapse_port_db.sh | 2 +- changelog.d/9689.misc | 1 + contrib/purge_api/purge_history.sh | 2 +- contrib/purge_api/purge_remote_media.sh | 2 +- demo/clean.sh | 2 +- demo/start.sh | 2 +- demo/stop.sh | 2 +- docker/build_debian.sh | 2 +- docker/run_pg_tests.sh | 2 +- scripts-dev/check-newsfragment | 2 +- scripts-dev/config-lint.sh | 2 +- scripts-dev/generate_sample_config | 2 +- scripts-dev/lint.sh | 2 +- scripts-dev/make_full_schema.sh | 2 +- scripts-dev/next_github_number.sh | 4 ++-- test_postgresql.sh | 2 +- 17 files changed, 18 insertions(+), 17 deletions(-) create mode 100644 changelog.d/9689.misc diff --git a/.buildkite/scripts/test_old_deps.sh b/.buildkite/scripts/test_old_deps.sh index 28e6694b5..9fe5b696b 100755 --- a/.buildkite/scripts/test_old_deps.sh +++ b/.buildkite/scripts/test_old_deps.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # this script is run by buildkite in a plain `xenial` container; it installs the # minimal requirements for tox and hands over to the py35-old tox environment. diff --git a/.buildkite/scripts/test_synapse_port_db.sh b/.buildkite/scripts/test_synapse_port_db.sh index 9ed217763..8914319e3 100755 --- a/.buildkite/scripts/test_synapse_port_db.sh +++ b/.buildkite/scripts/test_synapse_port_db.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # # Test script for 'synapse_port_db', which creates a virtualenv, installs Synapse along # with additional dependencies needed for the test (such as coverage or the PostgreSQL diff --git a/changelog.d/9689.misc b/changelog.d/9689.misc new file mode 100644 index 000000000..a08d3482a --- /dev/null +++ b/changelog.d/9689.misc @@ -0,0 +1 @@ +Use interpreter from `$PATH` via `/usr/bin/env` instead of absolute paths in various scripts. diff --git a/contrib/purge_api/purge_history.sh b/contrib/purge_api/purge_history.sh index e7dd5d646..c45136ff5 100644 --- a/contrib/purge_api/purge_history.sh +++ b/contrib/purge_api/purge_history.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # this script will use the api: # https://github.com/matrix-org/synapse/blob/master/docs/admin_api/purge_history_api.rst diff --git a/contrib/purge_api/purge_remote_media.sh b/contrib/purge_api/purge_remote_media.sh index 77220d3bd..4930d9529 100644 --- a/contrib/purge_api/purge_remote_media.sh +++ b/contrib/purge_api/purge_remote_media.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash DOMAIN=yourserver.tld # add this user as admin in your home server: diff --git a/demo/clean.sh b/demo/clean.sh index 418ca9457..6b809f6e8 100755 --- a/demo/clean.sh +++ b/demo/clean.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -e diff --git a/demo/start.sh b/demo/start.sh index f6b5ea137..621a5698b 100755 --- a/demo/start.sh +++ b/demo/start.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash DIR="$( cd "$( dirname "$0" )" && pwd )" diff --git a/demo/stop.sh b/demo/stop.sh index 85a1d2c16..f9dddc591 100755 --- a/demo/stop.sh +++ b/demo/stop.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash DIR="$( cd "$( dirname "$0" )" && pwd )" diff --git a/docker/build_debian.sh b/docker/build_debian.sh index f312f0715..f426d2b77 100644 --- a/docker/build_debian.sh +++ b/docker/build_debian.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # The script to build the Debian package, as ran inside the Docker image. 
diff --git a/docker/run_pg_tests.sh b/docker/run_pg_tests.sh index d18d1e4c8..1fd08cb62 100755 --- a/docker/run_pg_tests.sh +++ b/docker/run_pg_tests.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # This script runs the PostgreSQL tests inside a Docker container. It expects # the relevant source files to be mounted into /src (done automatically by the diff --git a/scripts-dev/check-newsfragment b/scripts-dev/check-newsfragment index 448cadb82..af6d32e33 100755 --- a/scripts-dev/check-newsfragment +++ b/scripts-dev/check-newsfragment @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # # A script which checks that an appropriate news file has been added on this # branch. diff --git a/scripts-dev/config-lint.sh b/scripts-dev/config-lint.sh index 913216046..8c6323e59 100755 --- a/scripts-dev/config-lint.sh +++ b/scripts-dev/config-lint.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Find linting errors in Synapse's default config file. # Exits with 0 if there are no problems, or another code otherwise. diff --git a/scripts-dev/generate_sample_config b/scripts-dev/generate_sample_config index 9cb4630a5..02739894b 100755 --- a/scripts-dev/generate_sample_config +++ b/scripts-dev/generate_sample_config @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # # Update/check the docs/sample_config.yaml diff --git a/scripts-dev/lint.sh b/scripts-dev/lint.sh index fe2965cd3..9761e9759 100755 --- a/scripts-dev/lint.sh +++ b/scripts-dev/lint.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # # Runs linting scripts over the local Synapse checkout # isort - sorts import statements diff --git a/scripts-dev/make_full_schema.sh b/scripts-dev/make_full_schema.sh index b8d1e636f..bc8f97866 100755 --- a/scripts-dev/make_full_schema.sh +++ b/scripts-dev/make_full_schema.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # # This script generates SQL files for creating a brand new Synapse DB with the latest # schema, on both SQLite3 and Postgres. diff --git a/scripts-dev/next_github_number.sh b/scripts-dev/next_github_number.sh index 376280025..00e9b1456 100755 --- a/scripts-dev/next_github_number.sh +++ b/scripts-dev/next_github_number.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -e @@ -6,4 +6,4 @@ set -e # next PR number. CURRENT_NUMBER=`curl -s "https://api.github.com/repos/matrix-org/synapse/issues?state=all&per_page=1" | jq -r ".[0].number"` CURRENT_NUMBER=$((CURRENT_NUMBER+1)) -echo $CURRENT_NUMBER \ No newline at end of file +echo $CURRENT_NUMBER diff --git a/test_postgresql.sh b/test_postgresql.sh index 1ffcaabd3..c10828fbb 100755 --- a/test_postgresql.sh +++ b/test_postgresql.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # This script builds the Docker image to run the PostgreSQL tests, and then runs # the tests. From d7d4232a2d8e1ca3c27ced3dc8b0dcda0a9e4c6f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Paul=20T=C3=B6tterman?= Date: Fri, 26 Mar 2021 12:38:31 +0200 Subject: [PATCH 39/52] Preserve host in example apache config (#9696) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes redirect loop Signed-off-by: Paul Tötterman --- UPGRADE.rst | 9 ++++++--- docs/reverse_proxy.md | 7 +++++-- 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/UPGRADE.rst b/UPGRADE.rst index 8bc2ff91a..ba488e104 100644 --- a/UPGRADE.rst +++ b/UPGRADE.rst @@ -98,9 +98,12 @@ will log a warning on each received request. 
To avoid the warning, administrators using a reverse proxy should ensure that the reverse proxy sets `X-Forwarded-Proto` header to `https` or `http` to -indicate the protocol used by the client. See the `reverse proxy documentation -`_, where the example configurations have been updated to -show how to set this header. +indicate the protocol used by the client. + +Synapse also requires the `Host` header to be preserved. + +See the `reverse proxy documentation `_, where the +example configurations have been updated to show how to set these headers. (Users of `Caddy `_ are unaffected, since we believe it sets `X-Forwarded-Proto` by default.) diff --git a/docs/reverse_proxy.md b/docs/reverse_proxy.md index 860afd5a0..cf1b835b9 100644 --- a/docs/reverse_proxy.md +++ b/docs/reverse_proxy.md @@ -104,10 +104,11 @@ example.com:8448 { ``` SSLEngine on - ServerName matrix.example.com; + ServerName matrix.example.com RequestHeader set "X-Forwarded-Proto" expr=%{REQUEST_SCHEME} AllowEncodedSlashes NoDecode + ProxyPreserveHost on ProxyPass /_matrix http://127.0.0.1:8008/_matrix nocanon ProxyPassReverse /_matrix http://127.0.0.1:8008/_matrix ProxyPass /_synapse/client http://127.0.0.1:8008/_synapse/client nocanon @@ -116,7 +117,7 @@ example.com:8448 { SSLEngine on - ServerName example.com; + ServerName example.com RequestHeader set "X-Forwarded-Proto" expr=%{REQUEST_SCHEME} AllowEncodedSlashes NoDecode @@ -135,6 +136,8 @@ example.com:8448 { ``` +**NOTE 3**: Missing `ProxyPreserveHost on` can lead to a redirect loop. + ### HAProxy ``` From b5efcb577e2c9b8b38cb86f87cf65fa93eb2566b Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 26 Mar 2021 16:49:46 +0000 Subject: [PATCH 40/52] Make it possible to use dmypy (#9692) Running `dmypy run` will do a `mypy` check while spinning up a daemon that makes rerunning `dmypy run` a lot faster. `dmypy` doesn't support `follow_imports = silent` and has `local_partial_types` enabled, so this PR enables those options and fixes the issues that were newly raised. Note that `local_partial_types` will be enabled by default in upcoming mypy releases. --- changelog.d/9692.misc | 1 + mypy.ini | 3 +- synapse/api/auth.py | 5 ++++ synapse/config/cache.py | 6 ++-- synapse/handlers/oidc_handler.py | 3 ++ synapse/logging/opentracing.py | 2 +- synapse/replication/tcp/protocol.py | 2 +- synapse/rest/admin/rooms.py | 3 ++ synapse/rest/admin/users.py | 3 ++ synapse/rest/client/v2_alpha/sync.py | 3 ++ synapse/rest/media/v1/preview_url_resource.py | 2 ++ synapse/rest/synapse/client/pick_username.py | 3 ++ synapse/util/caches/__init__.py | 4 +-- tests/replication/tcp/streams/test_typing.py | 1 + tests/replication/test_multi_media_repo.py | 4 +-- tests/server.py | 28 +++++++++++++------ 16 files changed, 56 insertions(+), 17 deletions(-) create mode 100644 changelog.d/9692.misc diff --git a/changelog.d/9692.misc b/changelog.d/9692.misc new file mode 100644 index 000000000..d02002586 --- /dev/null +++ b/changelog.d/9692.misc @@ -0,0 +1 @@ +Make it possible to use `dmypy`. 
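A sketch of what `local_partial_types` starts rejecting (illustrative, with simplified value types): module-level names initialised to `None` or `{}` must now have their full type written out, which is what the `# type:` comments added in the diffs below do:

```python
import re
from typing import Dict, Optional, Pattern

# With `local_partial_types = True`, mypy no longer defers inference for
# "partial" module-level types, so a bare `x = None` or `x = {}` becomes
# an error. Spelling the type out satisfies the checker:
_homeserver_whitelist = None  # type: Optional[Pattern[str]]
_caches_by_name = {}  # type: Dict[str, int]


def set_whitelist(regex: str) -> None:
    # The annotation above is what lets this later assignment type-check.
    global _homeserver_whitelist
    _homeserver_whitelist = re.compile(regex)
```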
diff --git a/mypy.ini b/mypy.ini index 709a8d07a..3ae5d4578 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1,12 +1,13 @@ [mypy] namespace_packages = True plugins = mypy_zope:plugin, scripts-dev/mypy_synapse_plugin.py -follow_imports = silent +follow_imports = normal check_untyped_defs = True show_error_codes = True show_traceback = True mypy_path = stubs warn_unreachable = True +local_partial_types = True # To find all folders that pass mypy you run: # diff --git a/synapse/api/auth.py b/synapse/api/auth.py index e10e33fd2..7d9930ae7 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -558,6 +558,9 @@ class Auth: Returns: bool: False if no access_token was given, True otherwise. """ + # This will always be set by the time Twisted calls us. + assert request.args is not None + query_params = request.args.get(b"access_token") auth_headers = request.requestHeaders.getRawHeaders(b"Authorization") return bool(query_params) or bool(auth_headers) @@ -574,6 +577,8 @@ class Auth: MissingClientTokenError: If there isn't a single access_token in the request """ + # This will always be set by the time Twisted calls us. + assert request.args is not None auth_headers = request.requestHeaders.getRawHeaders(b"Authorization") query_params = request.args.get(b"access_token") diff --git a/synapse/config/cache.py b/synapse/config/cache.py index 8e03f1400..4e8abbf88 100644 --- a/synapse/config/cache.py +++ b/synapse/config/cache.py @@ -24,7 +24,7 @@ from ._base import Config, ConfigError _CACHE_PREFIX = "SYNAPSE_CACHE_FACTOR" # Map from canonicalised cache name to cache. -_CACHES = {} +_CACHES = {} # type: Dict[str, Callable[[float], None]] # a lock on the contents of _CACHES _CACHES_LOCK = threading.Lock() @@ -59,7 +59,9 @@ def _canonicalise_cache_name(cache_name: str) -> str: return cache_name.lower() -def add_resizable_cache(cache_name: str, cache_resize_callback: Callable): +def add_resizable_cache( + cache_name: str, cache_resize_callback: Callable[[float], None] +): """Register a cache that's size can dynamically change Args: diff --git a/synapse/handlers/oidc_handler.py b/synapse/handlers/oidc_handler.py index bc3630e9e..6624212d6 100644 --- a/synapse/handlers/oidc_handler.py +++ b/synapse/handlers/oidc_handler.py @@ -149,6 +149,9 @@ class OidcHandler: Args: request: the incoming request from the browser. """ + # This will always be set by the time Twisted calls us. + assert request.args is not None + # The provider might redirect with an error. # In that case, just display it as-is. if b"error" in request.args: diff --git a/synapse/logging/opentracing.py b/synapse/logging/opentracing.py index 10bd4a146..c6e633574 100644 --- a/synapse/logging/opentracing.py +++ b/synapse/logging/opentracing.py @@ -262,7 +262,7 @@ logger = logging.getLogger(__name__) # Block everything by default # A regex which matches the server_names to expose traces for. # None means 'block everything'. -_homeserver_whitelist = None +_homeserver_whitelist = None # type: Optional[re.Pattern[str]] # Util methods diff --git a/synapse/replication/tcp/protocol.py b/synapse/replication/tcp/protocol.py index 825900f64..e829add25 100644 --- a/synapse/replication/tcp/protocol.py +++ b/synapse/replication/tcp/protocol.py @@ -104,7 +104,7 @@ tcp_outbound_commands_counter = Counter( # A list of all connected protocols. This allows us to send metrics about the # connections. 
-connected_connections = [] +connected_connections = [] # type: List[BaseReplicationStreamProtocol] logger = logging.getLogger(__name__) diff --git a/synapse/rest/admin/rooms.py b/synapse/rest/admin/rooms.py index 263d8ec07..cfe1bebb9 100644 --- a/synapse/rest/admin/rooms.py +++ b/synapse/rest/admin/rooms.py @@ -390,6 +390,9 @@ class JoinRoomAliasServlet(ResolveRoomIdMixin, RestServlet): async def on_POST( self, request: SynapseRequest, room_identifier: str ) -> Tuple[int, JsonDict]: + # This will always be set by the time Twisted calls us. + assert request.args is not None + requester = await self.auth.get_user_by_req(request) await assert_user_is_admin(self.auth, requester.user) diff --git a/synapse/rest/admin/users.py b/synapse/rest/admin/users.py index aaa56a702..309bd2771 100644 --- a/synapse/rest/admin/users.py +++ b/synapse/rest/admin/users.py @@ -833,6 +833,9 @@ class UserMediaRestServlet(RestServlet): async def on_GET( self, request: SynapseRequest, user_id: str ) -> Tuple[int, JsonDict]: + # This will always be set by the time Twisted calls us. + assert request.args is not None + await assert_requester_is_admin(self.auth, request) if not self.is_mine(UserID.from_string(user_id)): diff --git a/synapse/rest/client/v2_alpha/sync.py b/synapse/rest/client/v2_alpha/sync.py index a0db0a054..3481770c8 100644 --- a/synapse/rest/client/v2_alpha/sync.py +++ b/synapse/rest/client/v2_alpha/sync.py @@ -91,6 +91,9 @@ class SyncRestServlet(RestServlet): self._event_serializer = hs.get_event_client_serializer() async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + # This will always be set by the time Twisted calls us. + assert request.args is not None + if b"from" in request.args: # /events used to use 'from', but /sync uses 'since'. # Lets be helpful and whine if we see a 'from'. diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py index e590a0dea..c4ed9dfdb 100644 --- a/synapse/rest/media/v1/preview_url_resource.py +++ b/synapse/rest/media/v1/preview_url_resource.py @@ -187,6 +187,8 @@ class PreviewUrlResource(DirectServeJsonResource): respond_with_json(request, 200, {}, send_cors=True) async def _async_render_GET(self, request: SynapseRequest) -> None: + # This will always be set by the time Twisted calls us. + assert request.args is not None # XXX: if get_user_by_req fails, what should we do in an async render? requester = await self.auth.get_user_by_req(request) diff --git a/synapse/rest/synapse/client/pick_username.py b/synapse/rest/synapse/client/pick_username.py index 51acaa9a9..d9ffe8448 100644 --- a/synapse/rest/synapse/client/pick_username.py +++ b/synapse/rest/synapse/client/pick_username.py @@ -104,6 +104,9 @@ class AccountDetailsResource(DirectServeHtmlResource): respond_with_html(request, 200, html) async def _async_render_POST(self, request: SynapseRequest): + # This will always be set by the time Twisted calls us. 
+ assert request.args is not None + try: session_id = get_username_mapping_session_cookie_from_request(request) except SynapseError as e: diff --git a/synapse/util/caches/__init__.py b/synapse/util/caches/__init__.py index f96870633..48f64eeb3 100644 --- a/synapse/util/caches/__init__.py +++ b/synapse/util/caches/__init__.py @@ -25,8 +25,8 @@ from synapse.config.cache import add_resizable_cache logger = logging.getLogger(__name__) -caches_by_name = {} -collectors_by_name = {} # type: Dict +caches_by_name = {} # type: Dict[str, Sized] +collectors_by_name = {} # type: Dict[str, CacheMetric] cache_size = Gauge("synapse_util_caches_cache:size", "", ["name"]) cache_hits = Gauge("synapse_util_caches_cache:hits", "", ["name"]) diff --git a/tests/replication/tcp/streams/test_typing.py b/tests/replication/tcp/streams/test_typing.py index 5acfb3e53..ca49d4dd3 100644 --- a/tests/replication/tcp/streams/test_typing.py +++ b/tests/replication/tcp/streams/test_typing.py @@ -69,6 +69,7 @@ class TypingStreamTestCase(BaseStreamTestCase): self.assert_request_is_get_repl_stream_updates(request, "typing") # The from token should be the token from the last RDATA we got. + assert request.args is not None self.assertEqual(int(request.args[b"from_token"][0]), token) self.test_handler.on_rdata.assert_called_once() diff --git a/tests/replication/test_multi_media_repo.py b/tests/replication/test_multi_media_repo.py index 7ff11cde1..b0800f984 100644 --- a/tests/replication/test_multi_media_repo.py +++ b/tests/replication/test_multi_media_repo.py @@ -15,7 +15,7 @@ import logging import os from binascii import unhexlify -from typing import Tuple +from typing import Optional, Tuple from twisted.internet.protocol import Factory from twisted.protocols.tls import TLSMemoryBIOFactory @@ -32,7 +32,7 @@ from tests.server import FakeChannel, FakeSite, FakeTransport, make_request logger = logging.getLogger(__name__) -test_server_connection_factory = None +test_server_connection_factory = None # type: Optional[TestServerTLSConnectionFactory] class MediaRepoShardTestCase(BaseMultiWorkerStreamTestCase): diff --git a/tests/server.py b/tests/server.py index 57cc4ac60..b535a5d88 100644 --- a/tests/server.py +++ b/tests/server.py @@ -2,7 +2,7 @@ import json import logging from collections import deque from io import SEEK_END, BytesIO -from typing import Callable, Iterable, MutableMapping, Optional, Tuple, Union +from typing import Callable, Dict, Iterable, MutableMapping, Optional, Tuple, Union import attr from typing_extensions import Deque @@ -13,8 +13,11 @@ from twisted.internet._resolver import SimpleResolverComplexifier from twisted.internet.defer import Deferred, fail, succeed from twisted.internet.error import DNSLookupError from twisted.internet.interfaces import ( + IHostnameResolver, + IProtocol, + IPullProducer, + IPushProducer, IReactorPluggableNameResolver, - IReactorTCP, IResolverSimple, ITransport, ) @@ -45,11 +48,11 @@ class FakeChannel: wire). 
""" - site = attr.ib(type=Site) + site = attr.ib(type=Union[Site, "FakeSite"]) _reactor = attr.ib() result = attr.ib(type=dict, default=attr.Factory(dict)) _ip = attr.ib(type=str, default="127.0.0.1") - _producer = None + _producer = None # type: Optional[Union[IPullProducer, IPushProducer]] @property def json_body(self): @@ -159,7 +162,11 @@ class FakeChannel: Any cookines found are added to the given dict """ - for h in self.headers.getRawHeaders("Set-Cookie"): + headers = self.headers.getRawHeaders("Set-Cookie") + if not headers: + return + + for h in headers: parts = h.split(";") k, v = parts[0].split("=", maxsplit=1) cookies[k] = v @@ -311,8 +318,8 @@ class ThreadedMemoryReactorClock(MemoryReactorClock): self._tcp_callbacks = {} self._udp = [] - lookups = self.lookups = {} - self._thread_callbacks = deque() # type: Deque[Callable[[], None]]() + lookups = self.lookups = {} # type: Dict[str, str] + self._thread_callbacks = deque() # type: Deque[Callable[[], None]] @implementer(IResolverSimple) class FakeResolver: @@ -324,6 +331,9 @@ class ThreadedMemoryReactorClock(MemoryReactorClock): self.nameResolver = SimpleResolverComplexifier(FakeResolver()) super().__init__() + def installNameResolver(self, resolver: IHostnameResolver) -> IHostnameResolver: + raise NotImplementedError() + def listenUDP(self, port, protocol, interface="", maxPacketSize=8196): p = udp.Port(port, protocol, interface, maxPacketSize, self) p.startListening() @@ -621,7 +631,9 @@ class FakeTransport: self.disconnected = True -def connect_client(reactor: IReactorTCP, client_id: int) -> AccumulatingProtocol: +def connect_client( + reactor: ThreadedMemoryReactorClock, client_id: int +) -> Tuple[IProtocol, AccumulatingProtocol]: """ Connect a client to a fake TCP transport. From 7c8402ddb88df09cf1a33b3925c2996926fd0a23 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Fri, 26 Mar 2021 17:33:55 +0000 Subject: [PATCH 41/52] Suppress CryptographyDeprecationWarning (#9698) This warning is somewhat confusing to users, so let's suppress it --- changelog.d/9698.misc | 1 + synapse/app/_base.py | 21 +++++++++++++++++++++ synapse/app/generic_worker.py | 9 ++------- synapse/app/homeserver.py | 9 ++------- 4 files changed, 26 insertions(+), 14 deletions(-) create mode 100644 changelog.d/9698.misc diff --git a/changelog.d/9698.misc b/changelog.d/9698.misc new file mode 100644 index 000000000..d199e846c --- /dev/null +++ b/changelog.d/9698.misc @@ -0,0 +1 @@ +Suppress "CryptographyDeprecationWarning: int_from_bytes is deprecated". diff --git a/synapse/app/_base.py b/synapse/app/_base.py index 43b1f1e94..3912c8994 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -21,8 +21,10 @@ import signal import socket import sys import traceback +import warnings from typing import Awaitable, Callable, Iterable +from cryptography.utils import CryptographyDeprecationWarning from typing_extensions import NoReturn from twisted.internet import defer, error, reactor @@ -195,6 +197,25 @@ def listen_metrics(bind_addresses, port): start_http_server(port, addr=host, registry=RegistryProxy) +def listen_manhole(bind_addresses: Iterable[str], port: int, manhole_globals: dict): + # twisted.conch.manhole 21.1.0 uses "int_from_bytes", which produces a confusing + # warning. It's fixed by https://github.com/twisted/twisted/pull/1522), so + # suppress the warning for now. 
+ warnings.filterwarnings( + action="ignore", + category=CryptographyDeprecationWarning, + message="int_from_bytes is deprecated", + ) + + from synapse.util.manhole import manhole + + listen_tcp( + bind_addresses, + port, + manhole(username="matrix", password="rabbithole", globals=manhole_globals), + ) + + def listen_tcp(bind_addresses, port, factory, reactor=reactor, backlog=50): """ Create a TCP socket for a port and several addresses diff --git a/synapse/app/generic_worker.py b/synapse/app/generic_worker.py index caef394e1..6139881db 100644 --- a/synapse/app/generic_worker.py +++ b/synapse/app/generic_worker.py @@ -147,7 +147,6 @@ from synapse.storage.databases.main.user_directory import UserDirectoryStore from synapse.types import ReadReceipt from synapse.util.async_helpers import Linearizer from synapse.util.httpresourcetree import create_resource_tree -from synapse.util.manhole import manhole from synapse.util.versionstring import get_version_string logger = logging.getLogger("synapse.app.generic_worker") @@ -640,12 +639,8 @@ class GenericWorkerServer(HomeServer): if listener.type == "http": self._listen_http(listener) elif listener.type == "manhole": - _base.listen_tcp( - listener.bind_addresses, - listener.port, - manhole( - username="matrix", password="rabbithole", globals={"hs": self} - ), + _base.listen_manhole( + listener.bind_addresses, listener.port, manhole_globals={"hs": self} ) elif listener.type == "metrics": if not self.get_config().enable_metrics: diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 244657cb8..3bfe9d507 100644 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -67,7 +67,6 @@ from synapse.storage import DataStore from synapse.storage.engines import IncorrectDatabaseSetup from synapse.storage.prepare_database import UpgradeDatabaseException from synapse.util.httpresourcetree import create_resource_tree -from synapse.util.manhole import manhole from synapse.util.module_loader import load_module from synapse.util.versionstring import get_version_string @@ -288,12 +287,8 @@ class SynapseHomeServer(HomeServer): if listener.type == "http": self._listening_services.extend(self._listener_http(config, listener)) elif listener.type == "manhole": - listen_tcp( - listener.bind_addresses, - listener.port, - manhole( - username="matrix", password="rabbithole", globals={"hs": self} - ), + _base.listen_manhole( + listener.bind_addresses, listener.port, manhole_globals={"hs": self} ) elif listener.type == "replication": services = listen_tcp( From 0a778c135f37243277d1dbac2a6490455b228cfc Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Fri, 26 Mar 2021 13:42:58 -0500 Subject: [PATCH 42/52] Make pip install faster in Docker build for Complement testing (#9610) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Make pip install faster in Docker build for [Complement](https://github.com/matrix-org/complement) testing. If files have changed in a `COPY` command, Docker will invalidate all of the layers below. So I changed the order of operations to install all dependencies before we `COPY synapse /synapse/synapse/`. This allows Docker to use our cached layer of dependencies even when we change the source of Synapse and speed up builds dramatically! `53.5s` -> `3.7s` builds 🤘 As an alternative, I did try using BuildKit caches but this still took 30 seconds overall on that step. 15 seconds to gather the dependencies from the cache and another 15 seconds to `Installing collected packages`. 
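The caching pattern in miniature, as a hypothetical Dockerfile (not the one in this patch):

```dockerfile
FROM python:3.8-slim
WORKDIR /app

# 1. Copy only the dependency manifest first...
COPY requirements.txt .

# 2. ...and install dependencies. This layer is replayed from the Docker
#    cache as long as requirements.txt is unchanged, even when the
#    application source changes.
RUN pip install -r requirements.txt

# 3. Copy the frequently-edited source last, so a code change only
#    rebuilds from this layer down.
COPY . .
```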
Fix https://github.com/matrix-org/synapse/issues/9364 --- changelog.d/9610.docker | 1 + docker/Dockerfile | 82 ++++++++++++++++++++--------------------- 2 files changed, 41 insertions(+), 42 deletions(-) create mode 100644 changelog.d/9610.docker diff --git a/changelog.d/9610.docker b/changelog.d/9610.docker new file mode 100644 index 000000000..056252a66 --- /dev/null +++ b/changelog.d/9610.docker @@ -0,0 +1 @@ +Speed up Docker builds and make it nicer to test against Complement while developing (install all dependencies before copying the project). diff --git a/docker/Dockerfile b/docker/Dockerfile index a442b3459..5b7bf0277 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -25,42 +25,40 @@ LABEL org.opencontainers.image.licenses='Apache-2.0' # install the OS build deps RUN apt-get update && apt-get install -y \ - build-essential \ - libffi-dev \ - libjpeg-dev \ - libpq-dev \ - libssl-dev \ - libwebp-dev \ - libxml++2.6-dev \ - libxslt1-dev \ - openssl \ - rustc \ - zlib1g-dev \ - && rm -rf /var/lib/apt/lists/* + build-essential \ + libffi-dev \ + libjpeg-dev \ + libpq-dev \ + libssl-dev \ + libwebp-dev \ + libxml++2.6-dev \ + libxslt1-dev \ + openssl \ + rustc \ + zlib1g-dev \ + && rm -rf /var/lib/apt/lists/* -# Build dependencies that are not available as wheels, to speed up rebuilds -RUN pip install --prefix="/install" --no-warn-script-location \ - cryptography \ - frozendict \ - jaeger-client \ - opentracing \ - # Match the version constraints of Synapse - "prometheus_client>=0.4.0" \ - psycopg2 \ - pycparser \ - pyrsistent \ - pyyaml \ - simplejson \ - threadloop \ - thrift - -# now install synapse and all of the python deps to /install. -COPY synapse /synapse/synapse/ +# Copy just what we need to pip install COPY scripts /synapse/scripts/ COPY MANIFEST.in README.rst setup.py synctl /synapse/ +COPY synapse/__init__.py /synapse/synapse/__init__.py +COPY synapse/python_dependencies.py /synapse/synapse/python_dependencies.py +# To speed up rebuilds, install all of the dependencies before we copy over +# the whole synapse project so that this layer in the Docker cache can be +# used while you develop on the source +# +# This is aiming at installing the `install_requires` and `extras_require` from `setup.py` RUN pip install --prefix="/install" --no-warn-script-location \ - /synapse[all] + /synapse[all] + +# Copy over the rest of the project +COPY synapse /synapse/synapse/ + +# Install the synapse package itself and all of its children packages.
+# +# This is aiming at installing only the `packages=find_packages(...)` from `setup.py +RUN pip install --prefix="/install" --no-deps --no-warn-script-location /synapse ### ### Stage 1: runtime @@ -69,16 +67,16 @@ RUN pip install --prefix="/install" --no-warn-script-location \ FROM docker.io/python:${PYTHON_VERSION}-slim RUN apt-get update && apt-get install -y \ - curl \ - gosu \ - libjpeg62-turbo \ - libpq5 \ - libwebp6 \ - xmlsec1 \ - libjemalloc2 \ - libssl-dev \ - openssl \ - && rm -rf /var/lib/apt/lists/* + curl \ + gosu \ + libjpeg62-turbo \ + libpq5 \ + libwebp6 \ + xmlsec1 \ + libjemalloc2 \ + libssl-dev \ + openssl \ + && rm -rf /var/lib/apt/lists/* COPY --from=builder /install /usr/local COPY ./docker/start.py /start.py @@ -91,4 +89,4 @@ EXPOSE 8008/tcp 8009/tcp 8448/tcp ENTRYPOINT ["/start.py"] HEALTHCHECK --interval=1m --timeout=5s \ - CMD curl -fSs http://localhost:8008/health || exit 1 + CMD curl -fSs http://localhost:8008/health || exit 1 From ad8690a26c891a8e3444a3e80bdcdb3ba0d6a02b Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Mon, 29 Mar 2021 11:55:33 +0100 Subject: [PATCH 43/52] Fix the suggested pip incantation for cryptography (#9699) If you have the wrong version of `cryptography` installed, synapse suggests: ``` To install run: pip install --upgrade --force 'cryptography>=3.4.7;python_version>='3.6'' ``` However, the use of ' inside '...' doesn't work, so when you run this, you get an error. --- changelog.d/9699.bugfix | 1 + synapse/python_dependencies.py | 17 +++++++++++++++-- 2 files changed, 16 insertions(+), 2 deletions(-) create mode 100644 changelog.d/9699.bugfix diff --git a/changelog.d/9699.bugfix b/changelog.d/9699.bugfix new file mode 100644 index 000000000..e871825b3 --- /dev/null +++ b/changelog.d/9699.bugfix @@ -0,0 +1 @@ +Fix a bug introduced in Synapse 1.30.1 which meant the suggested `pip` incantation to install an updated `cryptography` was incorrect. diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py index 14ddaed02..2a1c925ee 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py @@ -15,6 +15,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import itertools import logging from typing import List, Set @@ -101,7 +102,7 @@ CONDITIONAL_REQUIREMENTS = { "txacme>=0.9.2", # txacme depends on eliot. 
Eliot 1.8.0 is incompatible with # python 3.5.2, as per https://github.com/itamarst/eliot/issues/418 - 'eliot<1.8.0;python_version<"3.5.3"', + "eliot<1.8.0;python_version<'3.5.3'", ], "saml2": [ # pysaml2 6.4.0 is incompatible with Python 3.5 (see https://github.com/IdentityPython/pysaml2/issues/749) @@ -131,6 +132,18 @@ for name, optional_deps in CONDITIONAL_REQUIREMENTS.items(): ALL_OPTIONAL_REQUIREMENTS = set(optional_deps) | ALL_OPTIONAL_REQUIREMENTS +# ensure there are no double-quote characters in any of the deps (otherwise the +# 'pip install' incantation in DependencyException will break) +for dep in itertools.chain( + REQUIREMENTS, + *CONDITIONAL_REQUIREMENTS.values(), +): + if '"' in dep: + raise Exception( + "Dependency `%s` contains double-quote; use single-quotes instead" % (dep,) + ) + + def list_requirements(): return list(set(REQUIREMENTS) | ALL_OPTIONAL_REQUIREMENTS) @@ -150,7 +163,7 @@ class DependencyException(Exception): @property def dependencies(self): for i in self.args[0]: - yield "'" + i + "'" + yield '"' + i + '"' def check_requirements(for_feature=None): From fc53a606e4ee7d94e2fadfb00d19b7e8e21699ad Mon Sep 17 00:00:00 2001 From: Jonathan de Jong Date: Mon, 29 Mar 2021 15:40:45 +0200 Subject: [PATCH 44/52] Fix `re.Pattern` mypy error on 3.6 (#9703) --- changelog.d/9703.misc | 1 + synapse/logging/opentracing.py | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 changelog.d/9703.misc diff --git a/changelog.d/9703.misc b/changelog.d/9703.misc new file mode 100644 index 000000000..8dda73922 --- /dev/null +++ b/changelog.d/9703.misc @@ -0,0 +1 @@ +Fix undetected mypy error when using Python 3.6. \ No newline at end of file diff --git a/synapse/logging/opentracing.py b/synapse/logging/opentracing.py index c6e633574..aa146e8bb 100644 --- a/synapse/logging/opentracing.py +++ b/synapse/logging/opentracing.py @@ -169,7 +169,7 @@ import inspect import logging import re from functools import wraps -from typing import TYPE_CHECKING, Dict, Optional, Type +from typing import TYPE_CHECKING, Dict, Optional, Pattern, Type import attr @@ -262,7 +262,7 @@ logger = logging.getLogger(__name__) # Block everything by default # A regex which matches the server_names to expose traces for. # None means 'block everything'. -_homeserver_whitelist = None # type: Optional[re.Pattern[str]] +_homeserver_whitelist = None # type: Optional[Pattern[str]] # Util methods From 5fdff977195dd8a9821f75c451a2a765a9f198f7 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Mon, 29 Mar 2021 14:42:38 +0100 Subject: [PATCH 45/52] Fix CI by ignore type for None module import (#9709) --- changelog.d/9709.misc | 1 + tests/replication/_base.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/9709.misc diff --git a/changelog.d/9709.misc b/changelog.d/9709.misc new file mode 100644 index 000000000..10542fdf3 --- /dev/null +++ b/changelog.d/9709.misc @@ -0,0 +1 @@ +Fix type-checking CI on develop. 
\ No newline at end of file diff --git a/tests/replication/_base.py b/tests/replication/_base.py index 67b791366..1d4a59286 100644 --- a/tests/replication/_base.py +++ b/tests/replication/_base.py @@ -44,7 +44,7 @@ from tests.server import FakeTransport try: import hiredis except ImportError: - hiredis = None + hiredis = None # type: ignore logger = logging.getLogger(__name__) From 4bbd53545041c9295fbae5dd0cefdd66d55d0b53 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Mon, 29 Mar 2021 15:40:11 +0100 Subject: [PATCH 46/52] Update the OIDC sample config (#9695) I've reiterated the advice about using `oidc` to migrate, since I've seen a few people caught by this. I've also removed a couple of the examples as they are duplicating the OIDC documentation, and I think they might be leading people astray. --- changelog.d/9695.doc | 1 + docs/sample_config.yaml | 34 +++------------------------------- synapse/config/oidc_config.py | 34 +++------------------------------- 3 files changed, 7 insertions(+), 62 deletions(-) create mode 100644 changelog.d/9695.doc diff --git a/changelog.d/9695.doc b/changelog.d/9695.doc new file mode 100644 index 000000000..cf82e68a8 --- /dev/null +++ b/changelog.d/9695.doc @@ -0,0 +1 @@ +Update the sample configuration for OIDC authentication. diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml index 07a928224..17cda71ad 100644 --- a/docs/sample_config.yaml +++ b/docs/sample_config.yaml @@ -1758,6 +1758,9 @@ saml2_config: # Note that, if this is changed, users authenticating via that provider # will no longer be recognised as the same user! # +# (Use "oidc" here if you are migrating from an old "oidc_config" +# configuration.) +# # idp_name: A user-facing name for this identity provider, which is used to # offer the user a choice of login mechanisms. # @@ -1927,37 +1930,6 @@ oidc_providers: # - attribute: userGroup # value: "synapseUsers" - # For use with Keycloak - # - #- idp_id: keycloak - # idp_name: Keycloak - # issuer: "https://127.0.0.1:8443/auth/realms/my_realm_name" - # client_id: "synapse" - # client_secret: "copy secret generated in Keycloak UI" - # scopes: ["openid", "profile"] - # attribute_requirements: - # - attribute: groups - # value: "admin" - - # For use with Github - # - #- idp_id: github - # idp_name: Github - # idp_brand: github - # discover: false - # issuer: "https://github.com/" - # client_id: "your-client-id" # TO BE FILLED - # client_secret: "your-client-secret" # TO BE FILLED - # authorization_endpoint: "https://github.com/login/oauth/authorize" - # token_endpoint: "https://github.com/login/oauth/access_token" - # userinfo_endpoint: "https://api.github.com/user" - # scopes: ["read:user"] - # user_mapping_provider: - # config: - # subject_claim: "id" - # localpart_template: "{{ user.login }}" - # display_name_template: "{{ user.name }}" - # Enable Central Authentication Service (CAS) for registration and login. # diff --git a/synapse/config/oidc_config.py b/synapse/config/oidc_config.py index 747ab9a7f..05733ec41 100644 --- a/synapse/config/oidc_config.py +++ b/synapse/config/oidc_config.py @@ -79,6 +79,9 @@ class OIDCConfig(Config): # Note that, if this is changed, users authenticating via that provider # will no longer be recognised as the same user! # + # (Use "oidc" here if you are migrating from an old "oidc_config" + # configuration.) + # # idp_name: A user-facing name for this identity provider, which is used to # offer the user a choice of login mechanisms. 
# @@ -247,37 +250,6 @@ class OIDCConfig(Config): # attribute_requirements: # - attribute: userGroup # value: "synapseUsers" - - # For use with Keycloak - # - #- idp_id: keycloak - # idp_name: Keycloak - # issuer: "https://127.0.0.1:8443/auth/realms/my_realm_name" - # client_id: "synapse" - # client_secret: "copy secret generated in Keycloak UI" - # scopes: ["openid", "profile"] - # attribute_requirements: - # - attribute: groups - # value: "admin" - - # For use with Github - # - #- idp_id: github - # idp_name: Github - # idp_brand: github - # discover: false - # issuer: "https://github.com/" - # client_id: "your-client-id" # TO BE FILLED - # client_secret: "your-client-secret" # TO BE FILLED - # authorization_endpoint: "https://github.com/login/oauth/authorize" - # token_endpoint: "https://github.com/login/oauth/access_token" - # userinfo_endpoint: "https://api.github.com/user" - # scopes: ["read:user"] - # user_mapping_provider: - # config: - # subject_claim: "id" - # localpart_template: "{{{{ user.login }}}}" - # display_name_template: "{{{{ user.name }}}}" """.format( mapping_provider=DEFAULT_USER_MAPPING_PROVIDER ) From da75d2ea1f2784791399dbeba16be401e2bb37d2 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Mon, 29 Mar 2021 11:43:20 -0400 Subject: [PATCH 47/52] Add type hints for the federation sender. (#9681) Includes an abstract base class which both the FederationSender and the FederationRemoteSendQueue must implement. --- changelog.d/9681.misc | 1 + synapse/app/generic_worker.py | 7 -- synapse/federation/send_queue.py | 88 ++++++++----- synapse/federation/sender/__init__.py | 116 ++++++++++++++++-- synapse/replication/tcp/commands.py | 6 +- synapse/replication/tcp/streams/federation.py | 14 ++- synapse/server.py | 4 +- 7 files changed, 177 insertions(+), 59 deletions(-) create mode 100644 changelog.d/9681.misc diff --git a/changelog.d/9681.misc b/changelog.d/9681.misc new file mode 100644 index 000000000..35338cd33 --- /dev/null +++ b/changelog.d/9681.misc @@ -0,0 +1 @@ +Add additional type hints to the Homeserver object. diff --git a/synapse/app/generic_worker.py b/synapse/app/generic_worker.py index 6139881db..3df2aa5c2 100644 --- a/synapse/app/generic_worker.py +++ b/synapse/app/generic_worker.py @@ -787,13 +787,6 @@ class FederationSenderHandler: self._fed_position_linearizer = Linearizer(name="_fed_position_linearizer") - def on_start(self): - # There may be some events that are persisted but haven't been sent, - # so send them now. - self.federation_sender.notify_new_events( - self.store.get_room_max_stream_ordering() - ) - def wake_destination(self, server: str): self.federation_sender.wake_destination(server) diff --git a/synapse/federation/send_queue.py b/synapse/federation/send_queue.py index 3e993b428..0c18c49ab 100644 --- a/synapse/federation/send_queue.py +++ b/synapse/federation/send_queue.py @@ -31,25 +31,39 @@ Events are replicated via a separate events stream. 
import logging from collections import namedtuple -from typing import Dict, List, Tuple, Type +from typing import ( + TYPE_CHECKING, + Dict, + Hashable, + Iterable, + List, + Optional, + Sized, + Tuple, + Type, +) from sortedcontainers import SortedDict -from twisted.internet import defer - from synapse.api.presence import UserPresenceState +from synapse.federation.sender import AbstractFederationSender, FederationSender from synapse.metrics import LaterGauge +from synapse.replication.tcp.streams.federation import FederationStream +from synapse.types import JsonDict, ReadReceipt, RoomStreamToken from synapse.util.metrics import Measure from .units import Edu +if TYPE_CHECKING: + from synapse.server import HomeServer + logger = logging.getLogger(__name__) -class FederationRemoteSendQueue: +class FederationRemoteSendQueue(AbstractFederationSender): """A drop in replacement for FederationSender""" - def __init__(self, hs): + def __init__(self, hs: "HomeServer"): self.server_name = hs.hostname self.clock = hs.get_clock() self.notifier = hs.get_notifier() @@ -58,7 +72,7 @@ class FederationRemoteSendQueue: # We may have multiple federation sender instances, so we need to track # their positions separately. self._sender_instances = hs.config.worker.federation_shard_config.instances - self._sender_positions = {} + self._sender_positions = {} # type: Dict[str, int] # Pending presence map user_id -> UserPresenceState self.presence_map = {} # type: Dict[str, UserPresenceState] @@ -71,7 +85,7 @@ class FederationRemoteSendQueue: # Stream position -> (user_id, destinations) self.presence_destinations = ( SortedDict() - ) # type: SortedDict[int, Tuple[str, List[str]]] + ) # type: SortedDict[int, Tuple[str, Iterable[str]]] # (destination, key) -> EDU self.keyed_edu = {} # type: Dict[Tuple[str, tuple], Edu] @@ -94,7 +108,7 @@ class FederationRemoteSendQueue: # we make a new function, so we need to make a new function so the inner # lambda binds to the queue rather than to the name of the queue which # changes. ARGH. - def register(name, queue): + def register(name: str, queue: Sized) -> None: LaterGauge( "synapse_federation_send_queue_%s_size" % (queue_name,), "", @@ -115,13 +129,13 @@ class FederationRemoteSendQueue: self.clock.looping_call(self._clear_queue, 30 * 1000) - def _next_pos(self): + def _next_pos(self) -> int: pos = self.pos self.pos += 1 self.pos_time[self.clock.time_msec()] = pos return pos - def _clear_queue(self): + def _clear_queue(self) -> None: """Clear the queues for anything older than N minutes""" FIVE_MINUTES_AGO = 5 * 60 * 1000 @@ -138,7 +152,7 @@ class FederationRemoteSendQueue: self._clear_queue_before_pos(position_to_delete) - def _clear_queue_before_pos(self, position_to_delete): + def _clear_queue_before_pos(self, position_to_delete: int) -> None: """Clear all the queues from before a given position""" with Measure(self.clock, "send_queue._clear"): # Delete things out of presence maps @@ -188,13 +202,18 @@ class FederationRemoteSendQueue: for key in keys[:i]: del self.edus[key] - def notify_new_events(self, max_token): + def notify_new_events(self, max_token: RoomStreamToken) -> None: """As per FederationSender""" - # We don't need to replicate this as it gets sent down a different - # stream. - pass + # This should never get called. 
+ raise NotImplementedError() - def build_and_send_edu(self, destination, edu_type, content, key=None): + def build_and_send_edu( + self, + destination: str, + edu_type: str, + content: JsonDict, + key: Optional[Hashable] = None, + ) -> None: """As per FederationSender""" if destination == self.server_name: logger.info("Not sending EDU to ourselves") @@ -218,38 +237,39 @@ class FederationRemoteSendQueue: self.notifier.on_new_replication_data() - def send_read_receipt(self, receipt): + async def send_read_receipt(self, receipt: ReadReceipt) -> None: """As per FederationSender Args: - receipt (synapse.types.ReadReceipt): + receipt: """ # nothing to do here: the replication listener will handle it. - return defer.succeed(None) - def send_presence(self, states): + def send_presence(self, states: List[UserPresenceState]) -> None: """As per FederationSender Args: - states (list(UserPresenceState)) + states """ pos = self._next_pos() # We only want to send presence for our own users, so lets always just # filter here just in case. - local_states = list(filter(lambda s: self.is_mine_id(s.user_id), states)) + local_states = [s for s in states if self.is_mine_id(s.user_id)] self.presence_map.update({state.user_id: state for state in local_states}) self.presence_changed[pos] = [state.user_id for state in local_states] self.notifier.on_new_replication_data() - def send_presence_to_destinations(self, states, destinations): + def send_presence_to_destinations( + self, states: Iterable[UserPresenceState], destinations: Iterable[str] + ) -> None: """As per FederationSender Args: - states (list[UserPresenceState]) - destinations (list[str]) + states + destinations """ for state in states: pos = self._next_pos() @@ -258,15 +278,18 @@ class FederationRemoteSendQueue: self.notifier.on_new_replication_data() - def send_device_messages(self, destination): + def send_device_messages(self, destination: str) -> None: """As per FederationSender""" # We don't need to replicate this as it gets sent down a different # stream. - def get_current_token(self): + def wake_destination(self, server: str) -> None: + pass + + def get_current_token(self) -> int: return self.pos - 1 - def federation_ack(self, instance_name, token): + def federation_ack(self, instance_name: str, token: int) -> None: if self._sender_instances: # If we have configured multiple federation sender instances we need # to track their positions separately, and only clear the queue up @@ -504,13 +527,16 @@ ParsedFederationStreamData = namedtuple( ) -def process_rows_for_federation(transaction_queue, rows): +def process_rows_for_federation( + transaction_queue: FederationSender, + rows: List[FederationStream.FederationStreamRow], +) -> None: """Parse a list of rows from the federation stream and put them in the transaction queue ready for sending to the relevant homeservers. Args: - transaction_queue (FederationSender) - rows (list(synapse.replication.tcp.streams.federation.FederationStream.FederationStreamRow)) + transaction_queue + rows """ # The federation stream contains a bunch of different types of diff --git a/synapse/federation/sender/__init__.py b/synapse/federation/sender/__init__.py index 24ebc4b80..8babb1ebb 100644 --- a/synapse/federation/sender/__init__.py +++ b/synapse/federation/sender/__init__.py @@ -13,14 +13,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import abc import logging -from typing import Dict, Hashable, Iterable, List, Optional, Set, Tuple +from typing import TYPE_CHECKING, Dict, Hashable, Iterable, List, Optional, Set, Tuple from prometheus_client import Counter from twisted.internet import defer -import synapse import synapse.metrics from synapse.api.presence import UserPresenceState from synapse.events import EventBase @@ -40,9 +40,12 @@ from synapse.metrics import ( events_processed_counter, ) from synapse.metrics.background_process_metrics import run_as_background_process -from synapse.types import ReadReceipt, RoomStreamToken +from synapse.types import JsonDict, ReadReceipt, RoomStreamToken from synapse.util.metrics import Measure, measure_func +if TYPE_CHECKING: + from synapse.server import HomeServer + logger = logging.getLogger(__name__) sent_pdus_destination_dist_count = Counter( @@ -65,8 +68,91 @@ CATCH_UP_STARTUP_DELAY_SEC = 15 CATCH_UP_STARTUP_INTERVAL_SEC = 5 -class FederationSender: - def __init__(self, hs: "synapse.server.HomeServer"): +class AbstractFederationSender(metaclass=abc.ABCMeta): + @abc.abstractmethod + def notify_new_events(self, max_token: RoomStreamToken) -> None: + """This gets called when we have some new events we might want to + send out to other servers. + """ + raise NotImplementedError() + + @abc.abstractmethod + async def send_read_receipt(self, receipt: ReadReceipt) -> None: + """Send a RR to any other servers in the room + + Args: + receipt: receipt to be sent + """ + raise NotImplementedError() + + @abc.abstractmethod + def send_presence(self, states: List[UserPresenceState]) -> None: + """Send the new presence states to the appropriate destinations. + + This actually queues up the presence states ready for sending and + triggers a background task to process them and send out the transactions. + """ + raise NotImplementedError() + + @abc.abstractmethod + def send_presence_to_destinations( + self, states: Iterable[UserPresenceState], destinations: Iterable[str] + ) -> None: + """Send the given presence states to the given destinations. + + Args: + destinations: + """ + raise NotImplementedError() + + @abc.abstractmethod + def build_and_send_edu( + self, + destination: str, + edu_type: str, + content: JsonDict, + key: Optional[Hashable] = None, + ) -> None: + """Construct an Edu object, and queue it for sending + + Args: + destination: name of server to send to + edu_type: type of EDU to send + content: content of EDU + key: clobbering key for this edu + """ + raise NotImplementedError() + + @abc.abstractmethod + def send_device_messages(self, destination: str) -> None: + raise NotImplementedError() + + @abc.abstractmethod + def wake_destination(self, destination: str) -> None: + """Called when we want to retry sending transactions to a remote. + + This is mainly useful if the remote server has been down and we think it + might have come back. 
+ """ + raise NotImplementedError() + + @abc.abstractmethod + def get_current_token(self) -> int: + raise NotImplementedError() + + @abc.abstractmethod + def federation_ack(self, instance_name: str, token: int) -> None: + raise NotImplementedError() + + @abc.abstractmethod + async def get_replication_rows( + self, instance_name: str, from_token: int, to_token: int, target_row_count: int + ) -> Tuple[List[Tuple[int, Tuple]], int, bool]: + raise NotImplementedError() + + +class FederationSender(AbstractFederationSender): + def __init__(self, hs: "HomeServer"): self.hs = hs self.server_name = hs.hostname @@ -432,7 +518,7 @@ class FederationSender: queue.flush_read_receipts_for_room(room_id) @preserve_fn # the caller should not yield on this - async def send_presence(self, states: List[UserPresenceState]): + async def send_presence(self, states: List[UserPresenceState]) -> None: """Send the new presence states to the appropriate destinations. This actually queues up the presence states ready for sending and @@ -494,7 +580,7 @@ class FederationSender: self._get_per_destination_queue(destination).send_presence(states) @measure_func("txnqueue._process_presence") - async def _process_presence_inner(self, states: List[UserPresenceState]): + async def _process_presence_inner(self, states: List[UserPresenceState]) -> None: """Given a list of states populate self.pending_presence_by_dest and poke to send a new transaction to each destination """ @@ -516,9 +602,9 @@ class FederationSender: self, destination: str, edu_type: str, - content: dict, + content: JsonDict, key: Optional[Hashable] = None, - ): + ) -> None: """Construct an Edu object, and queue it for sending Args: @@ -545,7 +631,7 @@ class FederationSender: self.send_edu(edu, key) - def send_edu(self, edu: Edu, key: Optional[Hashable]): + def send_edu(self, edu: Edu, key: Optional[Hashable]) -> None: """Queue an EDU for sending Args: @@ -563,7 +649,7 @@ class FederationSender: else: queue.send_edu(edu) - def send_device_messages(self, destination: str): + def send_device_messages(self, destination: str) -> None: if destination == self.server_name: logger.warning("Not sending device update to ourselves") return @@ -575,7 +661,7 @@ class FederationSender: self._get_per_destination_queue(destination).attempt_new_transaction() - def wake_destination(self, destination: str): + def wake_destination(self, destination: str) -> None: """Called when we want to retry sending transactions to a remote. This is mainly useful if the remote server has been down and we think it @@ -599,6 +685,10 @@ class FederationSender: # to a worker. return 0 + def federation_ack(self, instance_name: str, token: int) -> None: + # It is not expected that this gets called on FederationSender. + raise NotImplementedError() + @staticmethod async def get_replication_rows( instance_name: str, from_token: int, to_token: int, target_row_count: int @@ -607,7 +697,7 @@ class FederationSender: # to a worker. return [], 0, False - async def _wake_destinations_needing_catchup(self): + async def _wake_destinations_needing_catchup(self) -> None: """ Wakes up destinations that need catch-up and are not currently being backed off from. 
diff --git a/synapse/replication/tcp/commands.py b/synapse/replication/tcp/commands.py index bb447f75b..8abed1f52 100644 --- a/synapse/replication/tcp/commands.py +++ b/synapse/replication/tcp/commands.py @@ -312,16 +312,16 @@ class FederationAckCommand(Command): NAME = "FEDERATION_ACK" - def __init__(self, instance_name, token): + def __init__(self, instance_name: str, token: int): self.instance_name = instance_name self.token = token @classmethod - def from_line(cls, line): + def from_line(cls, line: str) -> "FederationAckCommand": instance_name, token = line.split(" ") return cls(instance_name, int(token)) - def to_line(self): + def to_line(self) -> str: return "%s %s" % (self.instance_name, self.token) diff --git a/synapse/replication/tcp/streams/federation.py b/synapse/replication/tcp/streams/federation.py index 9bcd13b00..9bb8e9e17 100644 --- a/synapse/replication/tcp/streams/federation.py +++ b/synapse/replication/tcp/streams/federation.py @@ -14,6 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. from collections import namedtuple +from typing import TYPE_CHECKING, Any, Awaitable, Callable, List, Tuple from synapse.replication.tcp.streams._base import ( Stream, @@ -21,6 +22,9 @@ from synapse.replication.tcp.streams._base import ( make_http_update_function, ) +if TYPE_CHECKING: + from synapse.server import HomeServer + class FederationStream(Stream): """Data to be sent over federation. Only available when master has federation @@ -38,7 +42,7 @@ class FederationStream(Stream): NAME = "federation" ROW_TYPE = FederationStreamRow - def __init__(self, hs): + def __init__(self, hs: "HomeServer"): if hs.config.worker_app is None: # master process: get updates from the FederationRemoteSendQueue. 
            # (if the master is configured to send federation itself, federation_sender
@@ -48,7 +52,9 @@
             current_token = current_token_without_instance(
                 federation_sender.get_current_token
             )
-            update_function = federation_sender.get_replication_rows
+            update_function = (
+                federation_sender.get_replication_rows
+            )  # type: Callable[[str, int, int, int], Awaitable[Tuple[List[Tuple[int, Any]], int, bool]]]
 
         elif hs.should_send_federation():
             # federation sender: Query master process
@@ -69,5 +75,7 @@ class FederationStream(Stream):
         return 0
 
     @staticmethod
-    async def _stub_update_function(instance_name, from_token, upto_token, limit):
+    async def _stub_update_function(
+        instance_name: str, from_token: int, upto_token: int, limit: int
+    ) -> Tuple[list, int, bool]:
         return [], upto_token, False
diff --git a/synapse/server.py b/synapse/server.py
index 5e787e228..e85b9391f 100644
--- a/synapse/server.py
+++ b/synapse/server.py
@@ -60,7 +60,7 @@ from synapse.federation.federation_server import (
     FederationServer,
 )
 from synapse.federation.send_queue import FederationRemoteSendQueue
-from synapse.federation.sender import FederationSender
+from synapse.federation.sender import AbstractFederationSender, FederationSender
 from synapse.federation.transport.client import TransportLayerClient
 from synapse.groups.attestations import GroupAttestationSigning, GroupAttestionRenewer
 from synapse.groups.groups_server import GroupsServerHandler, GroupsServerWorkerHandler
@@ -571,7 +571,7 @@ class HomeServer(metaclass=abc.ABCMeta):
         return TransportLayerClient(self)
 
     @cache_in_self
-    def get_federation_sender(self):
+    def get_federation_sender(self) -> AbstractFederationSender:
         if self.should_send_federation():
             return FederationSender(self)
         elif not self.config.worker_app:

From 7dcf3fd2212b60b1b51bf9c699c4e58e8bd121fb Mon Sep 17 00:00:00 2001
From: blakehawkins
Date: Mon, 29 Mar 2021 17:05:06 +0100
Subject: [PATCH 48/52] Clarify that register_new_matrix_user is present also
 when installed via non-pip package (#9074)

Signed-off-by: blakehawkins blake.hawkins.11@gmail.com
---
 INSTALL.md           | 22 ++++++++++++++++------
 changelog.d/9074.doc |  1 +
 2 files changed, 17 insertions(+), 6 deletions(-)
 create mode 100644 changelog.d/9074.doc

diff --git a/INSTALL.md b/INSTALL.md
index 59318cb08..808243719 100644
--- a/INSTALL.md
+++ b/INSTALL.md
@@ -527,14 +527,24 @@ email will be disabled.
 
 The easiest way to create a new user is to do so from a client like
 [Element](https://element.io/).
 
-Alternatively you can do so from the command line if you have installed via pip.
+Alternatively, you can do so from the command line, as follows:
 
-This can be done as follows:
+ 1. If Synapse was installed via pip, activate the virtualenv as follows (if Synapse was
+    installed via a prebuilt package, `register_new_matrix_user` should already be
+    on the search path):
+    ```sh
+    cd ~/synapse
+    source env/bin/activate
+    synctl start # if not already running
+    ```
+ 2. Run the following command:
+    ```sh
+    register_new_matrix_user -c homeserver.yaml http://localhost:8008
+    ```
 
-```sh
-$ source ~/synapse/env/bin/activate
-$ synctl start # if not already running
-$ register_new_matrix_user -c homeserver.yaml http://localhost:8008
+This will prompt you to add details for the new user, and will then connect to
+the running Synapse to create the new user.
For example: +``` New user localpart: erikj Password: Confirm password: diff --git a/changelog.d/9074.doc b/changelog.d/9074.doc new file mode 100644 index 000000000..57f7c97a6 --- /dev/null +++ b/changelog.d/9074.doc @@ -0,0 +1 @@ +Clarify that register_new_matrix_user is present also when installed via non-pip package. From 01dd90b0f08d1ffb36eb24282edc5db62a21170f Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Mon, 29 Mar 2021 12:15:33 -0400 Subject: [PATCH 49/52] Add type hints to DictionaryCache and TTLCache. (#9442) --- changelog.d/9442.misc | 1 + .../http/federation/well_known_resolver.py | 10 +-- synapse/storage/databases/state/store.py | 9 +-- synapse/util/caches/dictionary_cache.py | 64 +++++++++++++------ synapse/util/caches/ttlcache.py | 53 ++++++++------- tests/storage/test_state.py | 22 +++---- tests/util/test_dict_cache.py | 4 +- 7 files changed, 96 insertions(+), 67 deletions(-) create mode 100644 changelog.d/9442.misc diff --git a/changelog.d/9442.misc b/changelog.d/9442.misc new file mode 100644 index 000000000..e7f0b6262 --- /dev/null +++ b/changelog.d/9442.misc @@ -0,0 +1 @@ +Add type hints to the caching module. diff --git a/synapse/http/federation/well_known_resolver.py b/synapse/http/federation/well_known_resolver.py index ecd63e659..ce4079f15 100644 --- a/synapse/http/federation/well_known_resolver.py +++ b/synapse/http/federation/well_known_resolver.py @@ -71,8 +71,10 @@ WELL_KNOWN_RETRY_ATTEMPTS = 3 logger = logging.getLogger(__name__) -_well_known_cache = TTLCache("well-known") -_had_valid_well_known_cache = TTLCache("had-valid-well-known") +_well_known_cache = TTLCache("well-known") # type: TTLCache[bytes, Optional[bytes]] +_had_valid_well_known_cache = TTLCache( + "had-valid-well-known" +) # type: TTLCache[bytes, bool] @attr.s(slots=True, frozen=True) @@ -88,8 +90,8 @@ class WellKnownResolver: reactor: IReactorTime, agent: IAgent, user_agent: bytes, - well_known_cache: Optional[TTLCache] = None, - had_well_known_cache: Optional[TTLCache] = None, + well_known_cache: Optional[TTLCache[bytes, Optional[bytes]]] = None, + had_well_known_cache: Optional[TTLCache[bytes, bool]] = None, ): self._reactor = reactor self._clock = Clock(reactor) diff --git a/synapse/storage/databases/state/store.py b/synapse/storage/databases/state/store.py index e2240703a..97ec65f75 100644 --- a/synapse/storage/databases/state/store.py +++ b/synapse/storage/databases/state/store.py @@ -183,12 +183,13 @@ class StateGroupDataStore(StateBackgroundUpdateStore, SQLBaseStore): requests state from the cache, if False we need to query the DB for the missing state. """ - is_all, known_absent, state_dict_ids = cache.get(group) + cache_entry = cache.get(group) + state_dict_ids = cache_entry.value - if is_all or state_filter.is_full(): + if cache_entry.full or state_filter.is_full(): # Either we have everything or want everything, either way # `is_all` tells us whether we've gotten everything. - return state_filter.filter_state(state_dict_ids), is_all + return state_filter.filter_state(state_dict_ids), cache_entry.full # tracks whether any of our requested types are missing from the cache missing_types = False @@ -202,7 +203,7 @@ class StateGroupDataStore(StateBackgroundUpdateStore, SQLBaseStore): # There aren't any wild cards, so `concrete_types()` returns the # complete list of event types we're wanting. 
             for key in state_filter.concrete_types():
-                if key not in state_dict_ids and key not in known_absent:
+                if key not in state_dict_ids and key not in cache_entry.known_absent:
                     missing_types = True
                     break
 
diff --git a/synapse/util/caches/dictionary_cache.py b/synapse/util/caches/dictionary_cache.py
index 588d2d49f..b3b413b02 100644
--- a/synapse/util/caches/dictionary_cache.py
+++ b/synapse/util/caches/dictionary_cache.py
@@ -15,26 +15,38 @@
 import enum
 import logging
 import threading
-from collections import namedtuple
-from typing import Any
+from typing import Any, Dict, Generic, Iterable, Optional, Set, TypeVar
+
+import attr
 
 from synapse.util.caches.lrucache import LruCache
 
 logger = logging.getLogger(__name__)
 
 
-class DictionaryEntry(namedtuple("DictionaryEntry", ("full", "known_absent", "value"))):
+# The type of the cache keys.
+KT = TypeVar("KT")
+# The type of the dictionary keys.
+DKT = TypeVar("DKT")
+
+
+@attr.s(slots=True)
+class DictionaryEntry:
     """Returned when getting an entry from the cache
 
     Attributes:
-        full (bool): Whether the cache has the full or dict or just some keys.
+        full: Whether the cache has the full dict or just some keys.
             If not full then not all requested keys will necessarily be present
            in `value`
-        known_absent (set): Keys that were looked up in the dict and were not
+        known_absent: Keys that were looked up in the dict and were not
            there.
-        value (dict): The full or partial dict value
+        value: The full or partial dict value
     """
 
+    full = attr.ib(type=bool)
+    known_absent = attr.ib()
+    value = attr.ib()
+
     def __len__(self):
         return len(self.value)
 
@@ -45,21 +57,21 @@ class _Sentinel(enum.Enum):
     sentinel = object()
 
 
-class DictionaryCache:
+class DictionaryCache(Generic[KT, DKT]):
     """Caches key -> dictionary lookups, supporting caching partial dicts, i.e.
     fetching a subset of dictionary keys for a particular key.
     """
 
-    def __init__(self, name, max_entries=1000):
+    def __init__(self, name: str, max_entries: int = 1000):
         self.cache = LruCache(
             max_size=max_entries, cache_name=name, size_callback=len
-        )  # type: LruCache[Any, DictionaryEntry]
+        )  # type: LruCache[KT, DictionaryEntry]
 
         self.name = name
         self.sequence = 0
-        self.thread = None
+        self.thread = None  # type: Optional[threading.Thread]
 
-    def check_thread(self):
+    def check_thread(self) -> None:
         expected_thread = self.thread
         if expected_thread is None:
             self.thread = threading.current_thread()
@@ -69,12 +81,14 @@ class DictionaryCache:
                 "Cache objects can only be accessed from the main thread"
             )
 
-    def get(self, key, dict_keys=None):
+    def get(
+        self, key: KT, dict_keys: Optional[Iterable[DKT]] = None
+    ) -> DictionaryEntry:
         """Fetch an entry out of the cache
 
         Args:
             key
-            dict_key(list): If given a set of keys then return only those keys
+            dict_keys: If given a set of keys then return only those keys
                 that exist in the cache.
Returns: @@ -95,7 +109,7 @@ class DictionaryCache: return DictionaryEntry(False, set(), {}) - def invalidate(self, key): + def invalidate(self, key: KT) -> None: self.check_thread() # Increment the sequence number so that any SELECT statements that @@ -103,19 +117,25 @@ class DictionaryCache: self.sequence += 1 self.cache.pop(key, None) - def invalidate_all(self): + def invalidate_all(self) -> None: self.check_thread() self.sequence += 1 self.cache.clear() - def update(self, sequence, key, value, fetched_keys=None): + def update( + self, + sequence: int, + key: KT, + value: Dict[DKT, Any], + fetched_keys: Optional[Set[DKT]] = None, + ) -> None: """Updates the entry in the cache Args: sequence - key (K) - value (dict[X,Y]): The value to update the cache with. - fetched_keys (None|set[X]): All of the dictionary keys which were + key + value: The value to update the cache with. + fetched_keys: All of the dictionary keys which were fetched from the database. If None, this is the complete value for key K. Otherwise, it @@ -131,7 +151,9 @@ class DictionaryCache: else: self._update_or_insert(key, value, fetched_keys) - def _update_or_insert(self, key, value, known_absent): + def _update_or_insert( + self, key: KT, value: Dict[DKT, Any], known_absent: Set[DKT] + ) -> None: # We pop and reinsert as we need to tell the cache the size may have # changed @@ -140,5 +162,5 @@ class DictionaryCache: entry.known_absent.update(known_absent) self.cache[key] = entry - def _insert(self, key, value, known_absent): + def _insert(self, key: KT, value: Dict[DKT, Any], known_absent: Set[DKT]) -> None: self.cache[key] = DictionaryEntry(True, known_absent, value) diff --git a/synapse/util/caches/ttlcache.py b/synapse/util/caches/ttlcache.py index 6ce2a3d12..96a827494 100644 --- a/synapse/util/caches/ttlcache.py +++ b/synapse/util/caches/ttlcache.py @@ -15,6 +15,7 @@ import logging import time +from typing import Any, Callable, Dict, Generic, Tuple, TypeVar, Union import attr from sortedcontainers import SortedList @@ -23,15 +24,19 @@ from synapse.util.caches import register_cache logger = logging.getLogger(__name__) -SENTINEL = object() +SENTINEL = object() # type: Any + +T = TypeVar("T") +KT = TypeVar("KT") +VT = TypeVar("VT") -class TTLCache: +class TTLCache(Generic[KT, VT]): """A key/value cache implementation where each entry has its own TTL""" - def __init__(self, cache_name, timer=time.time): + def __init__(self, cache_name: str, timer: Callable[[], float] = time.time): # map from key to _CacheEntry - self._data = {} + self._data = {} # type: Dict[KT, _CacheEntry] # the _CacheEntries, sorted by expiry time self._expiry_list = SortedList() # type: SortedList[_CacheEntry] @@ -40,26 +45,27 @@ class TTLCache: self._metrics = register_cache("ttl", cache_name, self, resizable=False) - def set(self, key, value, ttl): + def set(self, key: KT, value: VT, ttl: float) -> None: """Add/update an entry in the cache Args: key: key for this entry value: value for this entry - ttl (float): TTL for this entry, in seconds + ttl: TTL for this entry, in seconds """ expiry = self._timer() + ttl self.expire() e = self._data.pop(key, SENTINEL) - if e != SENTINEL: + if e is not SENTINEL: + assert isinstance(e, _CacheEntry) self._expiry_list.remove(e) entry = _CacheEntry(expiry_time=expiry, ttl=ttl, key=key, value=value) self._data[key] = entry self._expiry_list.add(entry) - def get(self, key, default=SENTINEL): + def get(self, key: KT, default: T = SENTINEL) -> Union[VT, T]: """Get a value from the cache Args: @@ -72,23 +78,23 @@ 
class TTLCache: """ self.expire() e = self._data.get(key, SENTINEL) - if e == SENTINEL: + if e is SENTINEL: self._metrics.inc_misses() - if default == SENTINEL: + if default is SENTINEL: raise KeyError(key) return default + assert isinstance(e, _CacheEntry) self._metrics.inc_hits() return e.value - def get_with_expiry(self, key): + def get_with_expiry(self, key: KT) -> Tuple[VT, float, float]: """Get a value, and its expiry time, from the cache Args: key: key to look up Returns: - Tuple[Any, float, float]: the value from the cache, the expiry time - and the TTL + A tuple of the value from the cache, the expiry time and the TTL Raises: KeyError if the entry is not found @@ -102,7 +108,7 @@ class TTLCache: self._metrics.inc_hits() return e.value, e.expiry_time, e.ttl - def pop(self, key, default=SENTINEL): + def pop(self, key: KT, default: T = SENTINEL) -> Union[VT, T]: # type: ignore """Remove a value from the cache If key is in the cache, remove it and return its value, else return default. @@ -118,29 +124,30 @@ class TTLCache: """ self.expire() e = self._data.pop(key, SENTINEL) - if e == SENTINEL: + if e is SENTINEL: self._metrics.inc_misses() - if default == SENTINEL: + if default is SENTINEL: raise KeyError(key) return default + assert isinstance(e, _CacheEntry) self._expiry_list.remove(e) self._metrics.inc_hits() return e.value - def __getitem__(self, key): + def __getitem__(self, key: KT) -> VT: return self.get(key) - def __delitem__(self, key): + def __delitem__(self, key: KT) -> None: self.pop(key) - def __contains__(self, key): + def __contains__(self, key: KT) -> bool: return key in self._data - def __len__(self): + def __len__(self) -> int: self.expire() return len(self._data) - def expire(self): + def expire(self) -> None: """Run the expiry on the cache. Any entries whose expiry times are due will be removed """ @@ -158,7 +165,7 @@ class _CacheEntry: """TTLCache entry""" # expiry_time is the first attribute, so that entries are sorted by expiry. 
-    expiry_time = attr.ib()
-    ttl = attr.ib()
+    expiry_time = attr.ib(type=float)
+    ttl = attr.ib(type=float)
     key = attr.ib()
     value = attr.ib()
diff --git a/tests/storage/test_state.py b/tests/storage/test_state.py
index 8bd12fa84..2471f1267 100644
--- a/tests/storage/test_state.py
+++ b/tests/storage/test_state.py
@@ -377,14 +377,11 @@ class StateStoreTestCase(tests.unittest.TestCase):
         #######################################################
 
         # deliberately remove e2 (room name) from the _state_group_cache
-        (
-            is_all,
-            known_absent,
-            state_dict_ids,
-        ) = self.state_datastore._state_group_cache.get(group)
+        cache_entry = self.state_datastore._state_group_cache.get(group)
+        state_dict_ids = cache_entry.value
 
-        self.assertEqual(is_all, True)
-        self.assertEqual(known_absent, set())
+        self.assertEqual(cache_entry.full, True)
+        self.assertEqual(cache_entry.known_absent, set())
         self.assertDictEqual(
             state_dict_ids,
             {
@@ -403,14 +400,11 @@ class StateStoreTestCase(tests.unittest.TestCase):
             fetched_keys=((e1.type, e1.state_key),),
         )
 
-        (
-            is_all,
-            known_absent,
-            state_dict_ids,
-        ) = self.state_datastore._state_group_cache.get(group)
+        cache_entry = self.state_datastore._state_group_cache.get(group)
+        state_dict_ids = cache_entry.value
 
-        self.assertEqual(is_all, False)
-        self.assertEqual(known_absent, {(e1.type, e1.state_key)})
+        self.assertEqual(cache_entry.full, False)
+        self.assertEqual(cache_entry.known_absent, {(e1.type, e1.state_key)})
         self.assertDictEqual(state_dict_ids, {(e1.type, e1.state_key): e1.event_id})
 
         ############################################
diff --git a/tests/util/test_dict_cache.py b/tests/util/test_dict_cache.py
index 34fdc9a43..2f41333f4 100644
--- a/tests/util/test_dict_cache.py
+++ b/tests/util/test_dict_cache.py
@@ -27,7 +27,9 @@ class DictCacheTestCase(unittest.TestCase):
         key = "test_simple_cache_hit_full"
 
         v = self.cache.get(key)
-        self.assertEqual((False, set(), {}), v)
+        self.assertIs(v.full, False)
+        self.assertEqual(v.known_absent, set())
+        self.assertEqual({}, v.value)
 
         seq = self.cache.sequence
         test_value = {"test": "test_simple_cache_hit_full"}

From f380bb77d19e7dd4ee6f61cd489e240ee4aa8fc2 Mon Sep 17 00:00:00 2001
From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com>
Date: Tue, 30 Mar 2021 10:30:43 +0100
Subject: [PATCH 50/52] Use 'dmypy run' in lint.sh instead of 'mypy' (#9701)

For its obvious performance benefits. `dmypy` support landed in #9692.
---
 changelog.d/9701.misc | 1 +
 scripts-dev/lint.sh   | 2 +-
 2 files changed, 2 insertions(+), 1 deletion(-)
 create mode 100644 changelog.d/9701.misc

diff --git a/changelog.d/9701.misc b/changelog.d/9701.misc
new file mode 100644
index 000000000..49fa7b359
--- /dev/null
+++ b/changelog.d/9701.misc
@@ -0,0 +1 @@
+Use `dmypy run` in lint script for improved performance in type-checking while developing.
\ No newline at end of file diff --git a/scripts-dev/lint.sh b/scripts-dev/lint.sh index 9761e9759..41415ee07 100755 --- a/scripts-dev/lint.sh +++ b/scripts-dev/lint.sh @@ -95,4 +95,4 @@ isort "${files[@]}" python3 -m black "${files[@]}" ./scripts-dev/config-lint.sh flake8 "${files[@]}" -mypy +dmypy run From 78e48f61bf98099dea127535d4f427e5cc9ad34e Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 30 Mar 2021 11:19:21 +0100 Subject: [PATCH 51/52] 1.31.0rc1 --- CHANGES.md | 64 ++++++++++++++++++++++++++++++++++++++++ changelog.d/9074.doc | 1 - changelog.d/9411.misc | 1 - changelog.d/9442.misc | 1 - changelog.d/9499.misc | 1 - changelog.d/9585.bugfix | 1 - changelog.d/9588.bugfix | 1 - changelog.d/9609.feature | 1 - changelog.d/9610.docker | 1 - changelog.d/9612.docker | 1 - changelog.d/9631.misc | 1 - changelog.d/9634.misc | 1 - changelog.d/9636.bugfix | 1 - changelog.d/9637.misc | 1 - changelog.d/9638.misc | 1 - changelog.d/9639.bugfix | 1 - changelog.d/9640.misc | 1 - changelog.d/9643.feature | 1 - changelog.d/9644.feature | 1 - changelog.d/9645.misc | 1 - changelog.d/9647.misc | 1 - changelog.d/9649.misc | 1 - changelog.d/9650.misc | 1 - changelog.d/9652.feature | 1 - changelog.d/9653.feature | 1 - changelog.d/9657.feature | 1 - changelog.d/9659.misc | 1 - changelog.d/9664.misc | 1 - changelog.d/9665.misc | 1 - changelog.d/9667.doc | 1 - changelog.d/9674.misc | 1 - changelog.d/9675.misc | 1 - changelog.d/9676.misc | 1 - changelog.d/9678.misc | 1 - changelog.d/9679.doc | 1 - changelog.d/9681.misc | 1 - changelog.d/9689.misc | 1 - changelog.d/9692.misc | 1 - changelog.d/9695.doc | 1 - changelog.d/9698.misc | 1 - changelog.d/9699.bugfix | 1 - changelog.d/9701.misc | 1 - changelog.d/9703.misc | 1 - changelog.d/9709.misc | 1 - synapse/__init__.py | 2 +- 45 files changed, 65 insertions(+), 44 deletions(-) delete mode 100644 changelog.d/9074.doc delete mode 100644 changelog.d/9411.misc delete mode 100644 changelog.d/9442.misc delete mode 100644 changelog.d/9499.misc delete mode 100644 changelog.d/9585.bugfix delete mode 100644 changelog.d/9588.bugfix delete mode 100644 changelog.d/9609.feature delete mode 100644 changelog.d/9610.docker delete mode 100644 changelog.d/9612.docker delete mode 100644 changelog.d/9631.misc delete mode 100644 changelog.d/9634.misc delete mode 100644 changelog.d/9636.bugfix delete mode 100644 changelog.d/9637.misc delete mode 100644 changelog.d/9638.misc delete mode 100644 changelog.d/9639.bugfix delete mode 100644 changelog.d/9640.misc delete mode 100644 changelog.d/9643.feature delete mode 100644 changelog.d/9644.feature delete mode 100644 changelog.d/9645.misc delete mode 100644 changelog.d/9647.misc delete mode 100644 changelog.d/9649.misc delete mode 100644 changelog.d/9650.misc delete mode 100644 changelog.d/9652.feature delete mode 100644 changelog.d/9653.feature delete mode 100644 changelog.d/9657.feature delete mode 100644 changelog.d/9659.misc delete mode 100644 changelog.d/9664.misc delete mode 100644 changelog.d/9665.misc delete mode 100644 changelog.d/9667.doc delete mode 100644 changelog.d/9674.misc delete mode 100644 changelog.d/9675.misc delete mode 100644 changelog.d/9676.misc delete mode 100644 changelog.d/9678.misc delete mode 100644 changelog.d/9679.doc delete mode 100644 changelog.d/9681.misc delete mode 100644 changelog.d/9689.misc delete mode 100644 changelog.d/9692.misc delete mode 100644 changelog.d/9695.doc delete mode 100644 changelog.d/9698.misc delete mode 100644 changelog.d/9699.bugfix delete mode 100644 changelog.d/9701.misc delete 
mode 100644 changelog.d/9703.misc delete mode 100644 changelog.d/9709.misc diff --git a/CHANGES.md b/CHANGES.md index 9b9a6263b..395deb40e 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,67 @@ +Synapse 1.31.0rc1 (2021-03-30) +============================== + +Features +-------- + +- Add support to OpenID Connect login for requiring attributes on the `userinfo` response. Contributed by Hubbe King. ([\#9609](https://github.com/matrix-org/synapse/issues/9609)) +- Add initial experimental support for a "space summary" API. ([\#9643](https://github.com/matrix-org/synapse/issues/9643), [\#9652](https://github.com/matrix-org/synapse/issues/9652), [\#9653](https://github.com/matrix-org/synapse/issues/9653)) +- Add support for the busy presence state as described in [MSC3026](https://github.com/matrix-org/matrix-doc/pull/3026). ([\#9644](https://github.com/matrix-org/synapse/issues/9644)) +- Add support for credentials for proxy authentication in the `HTTPS_PROXY` environment variable. ([\#9657](https://github.com/matrix-org/synapse/issues/9657)) + + +Bugfixes +-------- + +- Fix a longstanding bug that could cause issues when editing a reply to a message. ([\#9585](https://github.com/matrix-org/synapse/issues/9585)) +- Fix the `/capabilities` endpoint to return `m.change_password` as disabled if the local password database is not used for authentication. Contributed by @dklimpel. ([\#9588](https://github.com/matrix-org/synapse/issues/9588)) +- Checks if passwords are allowed before setting it for the user. ([\#9636](https://github.com/matrix-org/synapse/issues/9636)) +- Fix a bug where federation sending can stall due to `concurrent access` database exceptions when it falls behind. ([\#9639](https://github.com/matrix-org/synapse/issues/9639)) +- Fix a bug introduced in Synapse 1.30.1 which meant the suggested `pip` incantation to install an updated `cryptography` was incorrect. ([\#9699](https://github.com/matrix-org/synapse/issues/9699)) + + +Updates to the Docker image +--------------------------- + +- Speed up Docker builds and make it nicer to test against Complement while developing (install all dependencies before copying the project). ([\#9610](https://github.com/matrix-org/synapse/issues/9610)) +- Include [opencontainers labels](https://github.com/opencontainers/image-spec/blob/master/annotations.md#pre-defined-annotation-keys) in the Docker image. ([\#9612](https://github.com/matrix-org/synapse/issues/9612)) + + +Improved Documentation +---------------------- + +- Clarify that `register_new_matrix_user` is present also when installed via non-pip package. ([\#9074](https://github.com/matrix-org/synapse/issues/9074)) +- Update source install documentation to mention platform prerequisites before the source install steps. ([\#9667](https://github.com/matrix-org/synapse/issues/9667)) +- Improve worker documentation for fallback/web auth endpoints. ([\#9679](https://github.com/matrix-org/synapse/issues/9679)) +- Update the sample configuration for OIDC authentication. ([\#9695](https://github.com/matrix-org/synapse/issues/9695)) + + +Internal Changes +---------------- + +- Preparatory steps for removing redundant `outlier` data from `event_json.internal_metadata` column. ([\#9411](https://github.com/matrix-org/synapse/issues/9411)) +- Add type hints to the caching module. ([\#9442](https://github.com/matrix-org/synapse/issues/9442)) +- Introduce flake8-bugbear to the test suite and fix some of its lint violations. 
([\#9499](https://github.com/matrix-org/synapse/issues/9499), [\#9659](https://github.com/matrix-org/synapse/issues/9659)) +- Add additional type hints to the Homeserver object. ([\#9631](https://github.com/matrix-org/synapse/issues/9631), [\#9638](https://github.com/matrix-org/synapse/issues/9638), [\#9675](https://github.com/matrix-org/synapse/issues/9675), [\#9681](https://github.com/matrix-org/synapse/issues/9681)) +- Only save remote cross-signing and device keys if they're different from the current ones. ([\#9634](https://github.com/matrix-org/synapse/issues/9634)) +- Rename storage function to fix spelling and not conflict with another functions name. ([\#9637](https://github.com/matrix-org/synapse/issues/9637)) +- Improve performance of federation catch up by sending events the latest events in the room to the remote, rather than just the last event sent by the local server. ([\#9640](https://github.com/matrix-org/synapse/issues/9640), [\#9664](https://github.com/matrix-org/synapse/issues/9664)) +- In the `federation_client` commandline client, stop automatically adding the URL prefix, so that servlets on other prefixes can be tested. ([\#9645](https://github.com/matrix-org/synapse/issues/9645)) +- In the `federation_client` commandline client, handle inline `signing_key`s in `homeserver.yaml`. ([\#9647](https://github.com/matrix-org/synapse/issues/9647)) +- Fixed some antipattern issues to improve code quality. ([\#9649](https://github.com/matrix-org/synapse/issues/9649)) +- Add a storage method for pulling all current user presence state from the database. ([\#9650](https://github.com/matrix-org/synapse/issues/9650)) +- Import `HomeServer` from the proper module. ([\#9665](https://github.com/matrix-org/synapse/issues/9665)) +- Increase default join ratelimiting burst rate. ([\#9674](https://github.com/matrix-org/synapse/issues/9674)) +- Add type hints to third party event rules and visibility modules. ([\#9676](https://github.com/matrix-org/synapse/issues/9676)) +- Bump mypy-zope to 0.2.13 to fix "Cannot determine consistent method resolution order (MRO)" errors when running mypy a second time. ([\#9678](https://github.com/matrix-org/synapse/issues/9678)) +- Use interpreter from `$PATH` via `/usr/bin/env` instead of absolute paths in various scripts. ([\#9689](https://github.com/matrix-org/synapse/issues/9689)) +- Make it possible to use `dmypy`. ([\#9692](https://github.com/matrix-org/synapse/issues/9692)) +- Suppress "CryptographyDeprecationWarning: int_from_bytes is deprecated". ([\#9698](https://github.com/matrix-org/synapse/issues/9698)) +- Use `dmypy run` in lint script for improved performance in type-checking while developing. ([\#9701](https://github.com/matrix-org/synapse/issues/9701)) +- Fix undetected mypy error when using Python 3.6. ([\#9703](https://github.com/matrix-org/synapse/issues/9703)) +- Fix type-checking CI on develop. ([\#9709](https://github.com/matrix-org/synapse/issues/9709)) + + Synapse 1.30.1 (2021-03-26) =========================== diff --git a/changelog.d/9074.doc b/changelog.d/9074.doc deleted file mode 100644 index 57f7c97a6..000000000 --- a/changelog.d/9074.doc +++ /dev/null @@ -1 +0,0 @@ -Clarify that register_new_matrix_user is present also when installed via non-pip package. diff --git a/changelog.d/9411.misc b/changelog.d/9411.misc deleted file mode 100644 index c3e6cfa5f..000000000 --- a/changelog.d/9411.misc +++ /dev/null @@ -1 +0,0 @@ -Preparatory steps for removing redundant `outlier` data from `event_json.internal_metadata` column. 
diff --git a/changelog.d/9442.misc b/changelog.d/9442.misc deleted file mode 100644 index e7f0b6262..000000000 --- a/changelog.d/9442.misc +++ /dev/null @@ -1 +0,0 @@ -Add type hints to the caching module. diff --git a/changelog.d/9499.misc b/changelog.d/9499.misc deleted file mode 100644 index 428a466fa..000000000 --- a/changelog.d/9499.misc +++ /dev/null @@ -1 +0,0 @@ -Introduce flake8-bugbear to the test suite and fix some of its lint violations. diff --git a/changelog.d/9585.bugfix b/changelog.d/9585.bugfix deleted file mode 100644 index de472ddfd..000000000 --- a/changelog.d/9585.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix a longstanding bug that could cause issues when editing a reply to a message. \ No newline at end of file diff --git a/changelog.d/9588.bugfix b/changelog.d/9588.bugfix deleted file mode 100644 index b8d614056..000000000 --- a/changelog.d/9588.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix the `/capabilities` endpoint to return `m.change_password` as disabled if the local password database is not used for authentication. Contributed by @dklimpel. diff --git a/changelog.d/9609.feature b/changelog.d/9609.feature deleted file mode 100644 index f3b634206..000000000 --- a/changelog.d/9609.feature +++ /dev/null @@ -1 +0,0 @@ -Logins using OpenID Connect can require attributes on the `userinfo` response in order to login. Contributed by Hubbe King. diff --git a/changelog.d/9610.docker b/changelog.d/9610.docker deleted file mode 100644 index 056252a66..000000000 --- a/changelog.d/9610.docker +++ /dev/null @@ -1 +0,0 @@ -Speed up Docker builds and make it nicer to test against Complement while developing (install all dependencies before copying the project). diff --git a/changelog.d/9612.docker b/changelog.d/9612.docker deleted file mode 100644 index d95c503c8..000000000 --- a/changelog.d/9612.docker +++ /dev/null @@ -1 +0,0 @@ -Include [opencontainers labels](https://github.com/opencontainers/image-spec/blob/master/annotations.md#pre-defined-annotation-keys) in the Docker image. diff --git a/changelog.d/9631.misc b/changelog.d/9631.misc deleted file mode 100644 index 35338cd33..000000000 --- a/changelog.d/9631.misc +++ /dev/null @@ -1 +0,0 @@ -Add additional type hints to the Homeserver object. diff --git a/changelog.d/9634.misc b/changelog.d/9634.misc deleted file mode 100644 index 59ac42cb8..000000000 --- a/changelog.d/9634.misc +++ /dev/null @@ -1 +0,0 @@ -Only save remote cross-signing and device keys if they're different from the current ones. diff --git a/changelog.d/9636.bugfix b/changelog.d/9636.bugfix deleted file mode 100644 index fa772ed6f..000000000 --- a/changelog.d/9636.bugfix +++ /dev/null @@ -1 +0,0 @@ -Checks if passwords are allowed before setting it for the user. \ No newline at end of file diff --git a/changelog.d/9637.misc b/changelog.d/9637.misc deleted file mode 100644 index 90a27d9f8..000000000 --- a/changelog.d/9637.misc +++ /dev/null @@ -1 +0,0 @@ -Rename storage function to fix spelling and not conflict with another functions name. diff --git a/changelog.d/9638.misc b/changelog.d/9638.misc deleted file mode 100644 index 35338cd33..000000000 --- a/changelog.d/9638.misc +++ /dev/null @@ -1 +0,0 @@ -Add additional type hints to the Homeserver object. diff --git a/changelog.d/9639.bugfix b/changelog.d/9639.bugfix deleted file mode 100644 index 51b374670..000000000 --- a/changelog.d/9639.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix bug where federation sending can stall due to `concurrent access` database exceptions when it falls behind. 
diff --git a/changelog.d/9640.misc b/changelog.d/9640.misc deleted file mode 100644 index 3d410ed4c..000000000 --- a/changelog.d/9640.misc +++ /dev/null @@ -1 +0,0 @@ -Improve performance of federation catch up by sending events the latest events in the room to the remote, rather than just the last event sent by the local server. diff --git a/changelog.d/9643.feature b/changelog.d/9643.feature deleted file mode 100644 index 2f7ccedcf..000000000 --- a/changelog.d/9643.feature +++ /dev/null @@ -1 +0,0 @@ -Add initial experimental support for a "space summary" API. diff --git a/changelog.d/9644.feature b/changelog.d/9644.feature deleted file mode 100644 index 556bcf0f9..000000000 --- a/changelog.d/9644.feature +++ /dev/null @@ -1 +0,0 @@ -Implement the busy presence state as described in [MSC3026](https://github.com/matrix-org/matrix-doc/pull/3026). diff --git a/changelog.d/9645.misc b/changelog.d/9645.misc deleted file mode 100644 index 9a7ce364c..000000000 --- a/changelog.d/9645.misc +++ /dev/null @@ -1 +0,0 @@ -In the `federation_client` commandline client, stop automatically adding the URL prefix, so that servlets on other prefixes can be tested. diff --git a/changelog.d/9647.misc b/changelog.d/9647.misc deleted file mode 100644 index 303a8c660..000000000 --- a/changelog.d/9647.misc +++ /dev/null @@ -1 +0,0 @@ -In the `federation_client` commandline client, handle inline `signing_key`s in `homeserver.yaml`. diff --git a/changelog.d/9649.misc b/changelog.d/9649.misc deleted file mode 100644 index 58c5fd053..000000000 --- a/changelog.d/9649.misc +++ /dev/null @@ -1 +0,0 @@ -Fixed some antipattern issues to improve code quality. diff --git a/changelog.d/9650.misc b/changelog.d/9650.misc deleted file mode 100644 index d830ead70..000000000 --- a/changelog.d/9650.misc +++ /dev/null @@ -1 +0,0 @@ -Add a storage method for pulling all current user presence state from the database. \ No newline at end of file diff --git a/changelog.d/9652.feature b/changelog.d/9652.feature deleted file mode 100644 index 2f7ccedcf..000000000 --- a/changelog.d/9652.feature +++ /dev/null @@ -1 +0,0 @@ -Add initial experimental support for a "space summary" API. diff --git a/changelog.d/9653.feature b/changelog.d/9653.feature deleted file mode 100644 index 2f7ccedcf..000000000 --- a/changelog.d/9653.feature +++ /dev/null @@ -1 +0,0 @@ -Add initial experimental support for a "space summary" API. diff --git a/changelog.d/9657.feature b/changelog.d/9657.feature deleted file mode 100644 index c56a615a8..000000000 --- a/changelog.d/9657.feature +++ /dev/null @@ -1 +0,0 @@ -Add support for credentials for proxy authentication in the `HTTPS_PROXY` environment variable. diff --git a/changelog.d/9659.misc b/changelog.d/9659.misc deleted file mode 100644 index 428a466fa..000000000 --- a/changelog.d/9659.misc +++ /dev/null @@ -1 +0,0 @@ -Introduce flake8-bugbear to the test suite and fix some of its lint violations. diff --git a/changelog.d/9664.misc b/changelog.d/9664.misc deleted file mode 100644 index 3d410ed4c..000000000 --- a/changelog.d/9664.misc +++ /dev/null @@ -1 +0,0 @@ -Improve performance of federation catch up by sending events the latest events in the room to the remote, rather than just the last event sent by the local server. diff --git a/changelog.d/9665.misc b/changelog.d/9665.misc deleted file mode 100644 index b8bf76c63..000000000 --- a/changelog.d/9665.misc +++ /dev/null @@ -1 +0,0 @@ -Import `HomeServer` from the proper module. 
diff --git a/changelog.d/9667.doc b/changelog.d/9667.doc deleted file mode 100644 index dec4816b4..000000000 --- a/changelog.d/9667.doc +++ /dev/null @@ -1 +0,0 @@ -Update source install documentation to mention platform prerequisites before the source install steps. \ No newline at end of file diff --git a/changelog.d/9674.misc b/changelog.d/9674.misc deleted file mode 100644 index c82fde61b..000000000 --- a/changelog.d/9674.misc +++ /dev/null @@ -1 +0,0 @@ -Increase default join ratelimiting burst rate. diff --git a/changelog.d/9675.misc b/changelog.d/9675.misc deleted file mode 100644 index 35338cd33..000000000 --- a/changelog.d/9675.misc +++ /dev/null @@ -1 +0,0 @@ -Add additional type hints to the Homeserver object. diff --git a/changelog.d/9676.misc b/changelog.d/9676.misc deleted file mode 100644 index 829e38b93..000000000 --- a/changelog.d/9676.misc +++ /dev/null @@ -1 +0,0 @@ -Add type hints to third party event rules and visibility modules. diff --git a/changelog.d/9678.misc b/changelog.d/9678.misc deleted file mode 100644 index 77a2b2d43..000000000 --- a/changelog.d/9678.misc +++ /dev/null @@ -1 +0,0 @@ -Bump mypy-zope to 0.2.13 to fix "Cannot determine consistent method resolution order (MRO)" errors when running mypy a second time. diff --git a/changelog.d/9679.doc b/changelog.d/9679.doc deleted file mode 100644 index 34f87490d..000000000 --- a/changelog.d/9679.doc +++ /dev/null @@ -1 +0,0 @@ -Improve worker documentation for fallback/web auth endpoints. diff --git a/changelog.d/9681.misc b/changelog.d/9681.misc deleted file mode 100644 index 35338cd33..000000000 --- a/changelog.d/9681.misc +++ /dev/null @@ -1 +0,0 @@ -Add additional type hints to the Homeserver object. diff --git a/changelog.d/9689.misc b/changelog.d/9689.misc deleted file mode 100644 index a08d3482a..000000000 --- a/changelog.d/9689.misc +++ /dev/null @@ -1 +0,0 @@ -Use interpreter from `$PATH` via `/usr/bin/env` instead of absolute paths in various scripts. diff --git a/changelog.d/9692.misc b/changelog.d/9692.misc deleted file mode 100644 index d02002586..000000000 --- a/changelog.d/9692.misc +++ /dev/null @@ -1 +0,0 @@ -Make it possible to use `dmypy`. diff --git a/changelog.d/9695.doc b/changelog.d/9695.doc deleted file mode 100644 index cf82e68a8..000000000 --- a/changelog.d/9695.doc +++ /dev/null @@ -1 +0,0 @@ -Update the sample configuration for OIDC authentication. diff --git a/changelog.d/9698.misc b/changelog.d/9698.misc deleted file mode 100644 index d199e846c..000000000 --- a/changelog.d/9698.misc +++ /dev/null @@ -1 +0,0 @@ -Suppress "CryptographyDeprecationWarning: int_from_bytes is deprecated". diff --git a/changelog.d/9699.bugfix b/changelog.d/9699.bugfix deleted file mode 100644 index e871825b3..000000000 --- a/changelog.d/9699.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix a bug introduced in Synapse 1.30.1 which meant the suggested `pip` incantation to install an updated `cryptography` was incorrect. diff --git a/changelog.d/9701.misc b/changelog.d/9701.misc deleted file mode 100644 index 49fa7b359..000000000 --- a/changelog.d/9701.misc +++ /dev/null @@ -1 +0,0 @@ -Use `dmypy run` in lint script for improved performance in type-checking while developing. \ No newline at end of file diff --git a/changelog.d/9703.misc b/changelog.d/9703.misc deleted file mode 100644 index 8dda73922..000000000 --- a/changelog.d/9703.misc +++ /dev/null @@ -1 +0,0 @@ -Fix undetected mypy error when using Python 3.6. 
\ No newline at end of file diff --git a/changelog.d/9709.misc b/changelog.d/9709.misc deleted file mode 100644 index 10542fdf3..000000000 --- a/changelog.d/9709.misc +++ /dev/null @@ -1 +0,0 @@ -Fix type-checking CI on develop. \ No newline at end of file diff --git a/synapse/__init__.py b/synapse/__init__.py index c9bc8fb9e..419299bf0 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -48,7 +48,7 @@ try: except ImportError: pass -__version__ = "1.30.1" +__version__ = "1.31.0rc1" if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)): # We import here so that we don't have to install a bunch of deps when From 3a446c21f81ee1c33225b499972dce30882056e2 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 30 Mar 2021 11:29:21 +0100 Subject: [PATCH 52/52] Update changelog --- CHANGES.md | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index 395deb40e..ff84a556a 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,6 +1,10 @@ Synapse 1.31.0rc1 (2021-03-30) ============================== +**Note:** As announced in v1.25.0, and in line with the deprecation policy for platform dependencies, this is the last release to support Python 3.5 and PostgreSQL 9.5. Future versions of Synapse will require Python 3.6+ and PostgreSQL 9.6+. + +This is also the last release that the Synapse team will be publishing packages for Debian Stretch and Ubuntu Xenial. + Features -------- @@ -15,7 +19,7 @@ Bugfixes - Fix a longstanding bug that could cause issues when editing a reply to a message. ([\#9585](https://github.com/matrix-org/synapse/issues/9585)) - Fix the `/capabilities` endpoint to return `m.change_password` as disabled if the local password database is not used for authentication. Contributed by @dklimpel. ([\#9588](https://github.com/matrix-org/synapse/issues/9588)) -- Checks if passwords are allowed before setting it for the user. ([\#9636](https://github.com/matrix-org/synapse/issues/9636)) +- Check if local passwords are enabled before setting them for the user. ([\#9636](https://github.com/matrix-org/synapse/issues/9636)) - Fix a bug where federation sending can stall due to `concurrent access` database exceptions when it falls behind. ([\#9639](https://github.com/matrix-org/synapse/issues/9639)) - Fix a bug introduced in Synapse 1.30.1 which meant the suggested `pip` incantation to install an updated `cryptography` was incorrect. ([\#9699](https://github.com/matrix-org/synapse/issues/9699)) @@ -44,8 +48,8 @@ Internal Changes - Introduce flake8-bugbear to the test suite and fix some of its lint violations. ([\#9499](https://github.com/matrix-org/synapse/issues/9499), [\#9659](https://github.com/matrix-org/synapse/issues/9659)) - Add additional type hints to the Homeserver object. ([\#9631](https://github.com/matrix-org/synapse/issues/9631), [\#9638](https://github.com/matrix-org/synapse/issues/9638), [\#9675](https://github.com/matrix-org/synapse/issues/9675), [\#9681](https://github.com/matrix-org/synapse/issues/9681)) - Only save remote cross-signing and device keys if they're different from the current ones. ([\#9634](https://github.com/matrix-org/synapse/issues/9634)) -- Rename storage function to fix spelling and not conflict with another functions name. ([\#9637](https://github.com/matrix-org/synapse/issues/9637)) -- Improve performance of federation catch up by sending events the latest events in the room to the remote, rather than just the last event sent by the local server. 
([\#9640](https://github.com/matrix-org/synapse/issues/9640), [\#9664](https://github.com/matrix-org/synapse/issues/9664)) +- Rename storage function to fix spelling and not conflict with another function's name. ([\#9637](https://github.com/matrix-org/synapse/issues/9637)) +- Improve performance of federation catch up by sending the latest events in the room to the remote, rather than just the last event sent by the local server. ([\#9640](https://github.com/matrix-org/synapse/issues/9640), [\#9664](https://github.com/matrix-org/synapse/issues/9664)) - In the `federation_client` commandline client, stop automatically adding the URL prefix, so that servlets on other prefixes can be tested. ([\#9645](https://github.com/matrix-org/synapse/issues/9645)) - In the `federation_client` commandline client, handle inline `signing_key`s in `homeserver.yaml`. ([\#9647](https://github.com/matrix-org/synapse/issues/9647)) - Fixed some antipattern issues to improve code quality. ([\#9649](https://github.com/matrix-org/synapse/issues/9649))