Merge remote-tracking branch 'upstream/release-v1.26.0'

Tulir Asokan 2021-01-25 15:48:21 +02:00
commit 15da8fa424
15 changed files with 149 additions and 23 deletions

View File

@ -1,8 +1,25 @@
Synapse 1.26.0rc2 (2021-01-25)
==============================
Bugfixes
--------
- Fix receipts and account data not being sent down sync. Introduced in v1.26.0rc1. ([\#9193](https://github.com/matrix-org/synapse/issues/9193), [\#9195](https://github.com/matrix-org/synapse/issues/9195))
- Fix chain cover update to handle events with duplicate auth events. Introduced in v1.26.0rc1. ([\#9210](https://github.com/matrix-org/synapse/issues/9210))
Internal Changes
----------------
- Add an `oidc-` prefix to any `idp_id`s which are given in the `oidc_providers` configuration. ([\#9189](https://github.com/matrix-org/synapse/issues/9189))
- Bump minimum `psycopg2` version to v2.8. ([\#9204](https://github.com/matrix-org/synapse/issues/9204))
Synapse 1.26.0rc1 (2021-01-20)
==============================
This release brings a new schema version for Synapse and rolling back to a previous
verious is not trivial. Please review [UPGRADE.rst](UPGRADE.rst) for more details
version is not trivial. Please review [UPGRADE.rst](UPGRADE.rst) for more details
on these changes and for general upgrade guidance.
Features

View File

@ -286,7 +286,7 @@ We recommend using the demo which starts 3 federated instances running on ports
(to stop, you can use `./demo/stop.sh`)
If you just want to start a single instance of the app and run it directly:
If you just want to start a single instance of the app and run it directly::
# Create the homeserver.yaml config once
python -m synapse.app.homeserver \

View File

@ -1728,7 +1728,9 @@ saml2_config:
#
# idp_icon: An optional icon for this identity provider, which is presented
# by identity picker pages. If given, must be an MXC URI of the format
# mxc://<server-name>/<media-id>
# mxc://<server-name>/<media-id>. (An easy way to obtain such an MXC URI
# is to upload an image to an (unencrypted) room and then copy the "url"
# from the source of the event.)
#
# discover: set to 'false' to disable the use of the OIDC discovery mechanism
# to discover endpoints. Defaults to true.
@ -1814,13 +1816,16 @@ saml2_config:
#
# For backwards compatibility, it is also possible to configure a single OIDC
# provider via an 'oidc_config' setting. This is now deprecated and admins are
# advised to migrate to the 'oidc_providers' format.
# advised to migrate to the 'oidc_providers' format. (When doing that migration,
# use 'oidc' for the idp_id to ensure that existing users continue to be
# recognised.)
#
oidc_providers:
# Generic example
#
#- idp_id: my_idp
# idp_name: "My OpenID provider"
# idp_icon: "mxc://example.com/mediaid"
# discover: false
# issuer: "https://accounts.example.com/"
# client_id: "provided-by-your-issuer"
@ -1844,8 +1849,8 @@ oidc_providers:
# For use with Github
#
#- idp_id: google
# idp_name: Google
#- idp_id: github
# idp_name: Github
# discover: false
# issuer: "https://github.com/"
# client_id: "your-client-id" # TO BE FILLED

View File

@ -48,7 +48,7 @@ try:
except ImportError:
pass
__version__ = "1.26.0rc1"
__version__ = "1.26.0rc2"
if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)):
# We import here so that we don't have to install a bunch of deps when

View File

@ -69,7 +69,9 @@ class OIDCConfig(Config):
#
# idp_icon: An optional icon for this identity provider, which is presented
# by identity picker pages. If given, must be an MXC URI of the format
# mxc://<server-name>/<media-id>
# mxc://<server-name>/<media-id>. (An easy way to obtain such an MXC URI
# is to upload an image to an (unencrypted) room and then copy the "url"
# from the source of the event.)
#
# discover: set to 'false' to disable the use of the OIDC discovery mechanism
# to discover endpoints. Defaults to true.
@ -155,13 +157,16 @@ class OIDCConfig(Config):
#
# For backwards compatibility, it is also possible to configure a single OIDC
# provider via an 'oidc_config' setting. This is now deprecated and admins are
# advised to migrate to the 'oidc_providers' format.
# advised to migrate to the 'oidc_providers' format. (When doing that migration,
# use 'oidc' for the idp_id to ensure that existing users continue to be
# recognised.)
#
oidc_providers:
# Generic example
#
#- idp_id: my_idp
# idp_name: "My OpenID provider"
# idp_icon: "mxc://example.com/mediaid"
# discover: false
# issuer: "https://accounts.example.com/"
# client_id: "provided-by-your-issuer"
@ -185,8 +190,8 @@ class OIDCConfig(Config):
# For use with Github
#
#- idp_id: google
# idp_name: Google
#- idp_id: github
# idp_name: Github
# discover: false
# issuer: "https://github.com/"
# client_id: "your-client-id" # TO BE FILLED
@ -210,6 +215,8 @@ OIDC_PROVIDER_CONFIG_SCHEMA = {
"type": "object",
"required": ["issuer", "client_id", "client_secret"],
"properties": {
# TODO: fix the maxLength here depending on what MSC2858 decides
# remember that we prefix the ID given here with `oidc-`
"idp_id": {"type": "string", "minLength": 1, "maxLength": 128},
"idp_name": {"type": "string"},
"idp_icon": {"type": "string"},
@ -335,6 +342,8 @@ def _parse_oidc_config_dict(
# enforce those limits now.
# TODO: factor out this stuff to a generic function
idp_id = oidc_config.get("idp_id", "oidc")
# TODO: update this validity check based on what MSC2858 decides.
valid_idp_chars = set(string.ascii_lowercase + string.digits + "-._")
if any(c not in valid_idp_chars for c in idp_id):
@ -348,6 +357,17 @@ def _parse_oidc_config_dict(
"idp_id must start with a-z", config_path + ("idp_id",),
)
# prefix the given IDP with a prefix specific to the SSO mechanism, to avoid
# clashes with other mechs (such as SAML, CAS).
#
# We allow "oidc" as an exception so that people migrating from old-style
# "oidc_config" format (which has long used "oidc" as its idp_id) can migrate to
# a new-style "oidc_providers" entry without changing the idp_id for their provider
# (and thereby invalidating their user_external_ids data).
if idp_id != "oidc":
idp_id = "oidc-" + idp_id
# MSC2858 also specifies that the idp_icon must be a valid MXC uri
idp_icon = oidc_config.get("idp_icon")
if idp_icon is not None:

View File

@ -86,8 +86,8 @@ REQUIREMENTS = [
CONDITIONAL_REQUIREMENTS = {
"matrix-synapse-ldap3": ["matrix-synapse-ldap3>=0.1"],
# we use execute_batch, which arrived in psycopg 2.7.
"postgres": ["psycopg2>=2.7"],
# we use execute_values with the fetch param, which arrived in psycopg 2.8.
"postgres": ["psycopg2>=2.8"],
# ACME support is required to provision TLS certificates from authorities
# that use the protocol, such as Let's Encrypt.
"acme": [

View File

@ -68,7 +68,7 @@ class AccountDataWorkerStore(SQLBaseStore):
# `StreamIdGenerator`, otherwise we use `SlavedIdTracker` which gets
# updated over replication. (Multiple writers are not supported for
# SQLite).
if hs.get_instance_name() in hs.config.worker.writers.events:
if hs.get_instance_name() in hs.config.worker.writers.account_data:
self._account_data_id_gen = StreamIdGenerator(
db_conn,
"room_account_data",

View File

@ -45,7 +45,7 @@ class ReceiptsWorkerStore(SQLBaseStore):
self._receipts_id_gen = MultiWriterIdGenerator(
db_conn=db_conn,
db=database,
stream_name="account_data",
stream_name="receipts",
instance_name=self._instance_name,
tables=[("receipts_linearized", "instance_name", "stream_id")],
sequence_name="receipts_sequence",
@ -61,7 +61,7 @@ class ReceiptsWorkerStore(SQLBaseStore):
# `StreamIdGenerator`, otherwise we use `SlavedIdTracker` which gets
# updated over replication. (Multiple writers are not supported for
# SQLite).
if hs.get_instance_name() in hs.config.worker.writers.events:
if hs.get_instance_name() in hs.config.worker.writers.receipts:
self._receipts_id_gen = StreamIdGenerator(
db_conn, "receipts_linearized", "stream_id"
)

View File

@ -0,0 +1,18 @@
/* Copyright 2021 The Matrix.org Foundation C.I.C
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-- We incorrectly populated these, so we delete them and let the
-- MultiWriterIdGenerator repopulate it.
DELETE FROM stream_positions WHERE stream_name = 'receipts' OR stream_name = 'account_data';

View File

@ -261,7 +261,11 @@ class MultiWriterIdGenerator:
# We check that the table and sequence haven't diverged.
for table, _, id_column in tables:
self._sequence_gen.check_consistency(
db_conn, table=table, id_column=id_column, positive=positive
db_conn,
table=table,
id_column=id_column,
stream_name=stream_name,
positive=positive,
)
# This goes and fills out the above state from the database.

View File

@ -45,6 +45,21 @@ and run the following SQL:
See docs/postgres.md for more information.
"""
_INCONSISTENT_STREAM_ERROR = """
Postgres sequence '%(seq)s' is inconsistent with associated stream position
of '%(stream_name)s' in the 'stream_positions' table.
This is likely a programming error and should be reported at
https://github.com/matrix-org/synapse.
A temporary workaround to fix this error is to shut down Synapse (including
any and all workers) and run the following SQL:
DELETE FROM stream_positions WHERE stream_name = '%(stream_name)s';
This will need to be done every time the server is restarted.
"""
class SequenceGenerator(metaclass=abc.ABCMeta):
"""A class which generates a unique sequence of integers"""
@ -60,14 +75,20 @@ class SequenceGenerator(metaclass=abc.ABCMeta):
db_conn: "LoggingDatabaseConnection",
table: str,
id_column: str,
stream_name: Optional[str] = None,
positive: bool = True,
):
"""Should be called during start up to test that the current value of
the sequence is greater than or equal to the maximum ID in the table.
This is to handle various cases where the sequence value can get out
of sync with the table, e.g. if Synapse gets rolled back to a previous
This is to handle various cases where the sequence value can get out of
sync with the table, e.g. if Synapse gets rolled back to a previous
version and then rolled forwards again.
If a stream name is given then this will check that any value in the
`stream_positions` table is less than or equal to the current sequence
value. If it isn't then it's likely that streams have been crossed
somewhere (e.g. two ID generators have the same stream name).
"""
...
@ -93,8 +114,12 @@ class PostgresSequenceGenerator(SequenceGenerator):
db_conn: "LoggingDatabaseConnection",
table: str,
id_column: str,
stream_name: Optional[str] = None,
positive: bool = True,
):
"""See SequenceGenerator.check_consistency for docstring.
"""
txn = db_conn.cursor(txn_name="sequence.check_consistency")
# First we get the current max ID from the table.
@ -118,6 +143,18 @@ class PostgresSequenceGenerator(SequenceGenerator):
"SELECT last_value, is_called FROM %(seq)s" % {"seq": self._sequence_name}
)
last_value, is_called = txn.fetchone()
# If we have an associated stream check the stream_positions table.
max_in_stream_positions = None
if stream_name:
txn.execute(
"SELECT MAX(stream_id) FROM stream_positions WHERE stream_name = ?",
(stream_name,),
)
row = txn.fetchone()
if row:
max_in_stream_positions = row[0]
txn.close()
# If `is_called` is False then `last_value` is actually the value that
@ -138,6 +175,14 @@ class PostgresSequenceGenerator(SequenceGenerator):
% {"seq": self._sequence_name, "table": table, "max_id_sql": table_sql}
)
# If we have values in the stream positions table then they have to be
# less than or equal to `last_value`
if max_in_stream_positions and max_in_stream_positions > last_value:
raise IncorrectDatabaseSetup(
_INCONSISTENT_STREAM_ERROR
% {"seq": self._sequence_name, "stream_name": stream_name}
)
GetFirstCallbackType = Callable[[Cursor], int]
@ -175,7 +220,12 @@ class LocalSequenceGenerator(SequenceGenerator):
return self._current_max_id
def check_consistency(
self, db_conn: Connection, table: str, id_column: str, positive: bool = True
self,
db_conn: Connection,
table: str,
id_column: str,
stream_name: Optional[str] = None,
positive: bool = True,
):
# There is nothing to do for in memory sequences
pass

View File

@ -78,7 +78,7 @@ def sorted_topologically(
if node not in degree_map:
continue
for edge in edges:
for edge in set(edges):
if edge in degree_map:
degree_map[node] += 1
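The one-character change above (iterating `set(edges)`) is easier to follow against a self-contained sketch of the algorithm. This is an illustrative reimplementation, not Synapse's actual `sorted_topologically`:

    import heapq
    from typing import Dict, Iterable, List, Set

    def toposort(nodes: Iterable[int], graph: Dict[int, List[int]]) -> List[int]:
        # `graph` maps a node to the nodes it depends on (which must sort first).
        degree = {n: 0 for n in nodes}
        reverse: Dict[int, Set[int]] = {n: set() for n in degree}
        for node, edges in graph.items():
            if node not in degree:
                continue
            for edge in set(edges):  # dedupe duplicate edges, as in the fix above
                if edge in degree:
                    degree[node] += 1
                    reverse[edge].add(node)
        # Repeatedly emit the smallest node whose dependencies have all been emitted.
        heap = [n for n, d in degree.items() if d == 0]
        heapq.heapify(heap)
        out = []
        while heap:
            n = heapq.heappop(heap)
            out.append(n)
            for dependent in reverse[n]:
                degree[dependent] -= 1
                if degree[dependent] == 0:
                    heapq.heappush(heap, dependent)
        return out

With `graph = {1: [], 2: [1, 1], 3: [2, 2], 4: [3]}` this yields `[1, 2, 3, 4]`. Without the `set()`, node 2's in-degree is counted as 2 but only ever decremented once (the reverse map is a set), so 2, 3 and 4 are never emitted; that is the bug the duplicate-edge test below exercises.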

View File

@ -446,7 +446,7 @@ class MultiSSOTestCase(unittest.HomeserverTestCase):
p.feed(channel.result["body"].decode("utf-8"))
p.close()
self.assertCountEqual(p.radios["idp"], ["cas", "oidc", "idp1", "saml"])
self.assertCountEqual(p.radios["idp"], ["cas", "oidc", "oidc-idp1", "saml"])
self.assertEqual(p.hiddens["redirectUrl"], TEST_CLIENT_REDIRECT_URL)

View File

@ -92,3 +92,15 @@ class SortTopologically(TestCase):
# Valid orderings are `[1, 3, 2, 4]` or `[1, 2, 3, 4]`, but we should
# always get the same one.
self.assertEqual(list(sorted_topologically([4, 3, 2, 1], graph)), [1, 2, 3, 4])
def test_duplicates(self):
"Test that a graph with duplicate edges work"
graph = {1: [], 2: [1, 1], 3: [2, 2], 4: [3]} # type: Dict[int, List[int]]
self.assertEqual(list(sorted_topologically([4, 3, 2, 1], graph)), [1, 2, 3, 4])
def test_multiple_paths(self):
"Test that a graph with multiple paths between two nodes work"
graph = {1: [], 2: [1], 3: [2], 4: [3, 2, 1]} # type: Dict[int, List[int]]
self.assertEqual(list(sorted_topologically([4, 3, 2, 1], graph)), [1, 2, 3, 4])

View File

@ -117,7 +117,7 @@ commands =
# Make all greater-thans equals so we test the oldest version of our direct
# dependencies, but make the pyopenssl 17.0, which can work against an
# OpenSSL 1.1 compiled cryptography (as older ones don't compile on Travis).
/bin/sh -c 'python -m synapse.python_dependencies | sed -e "s/>=/==/g" -e "s/psycopg2==2.6//" -e "s/pyopenssl==16.0.0/pyopenssl==17.0.0/" | xargs -d"\n" pip install'
/bin/sh -c 'python -m synapse.python_dependencies | sed -e "s/>=/==/g" -e "/psycopg2/d" -e "s/pyopenssl==16.0.0/pyopenssl==17.0.0/" | xargs -d"\n" pip install'
# Install Synapse itself. This won't update any libraries.
pip install -e ".[test]"