diff --git a/CHANGES.md b/CHANGES.md index 85c565a76..2d2474108 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,49 @@ +# Synapse 1.108.0rc1 (2024-05-21) + +### Features + +- Add a feature that allows clients to query the configured federation whitelist. Disabled by default. ([\#16848](https://github.com/element-hq/synapse/issues/16848), [\#17199](https://github.com/element-hq/synapse/issues/17199)) +- Add the ability to allow numeric user IDs with a specific prefix when in the CAS flow. Contributed by Aurélien Grimpard. ([\#17098](https://github.com/element-hq/synapse/issues/17098)) + +### Bugfixes + +- Fix bug where push rules would be empty in `/sync` for some accounts. Introduced in v1.93.0. ([\#17142](https://github.com/element-hq/synapse/issues/17142)) +- Add support for optional whitespace around the Federation API's `Authorization` header's parameter commas. ([\#17145](https://github.com/element-hq/synapse/issues/17145)) +- Fix bug where disabling room publication prevented public rooms being created on workers. ([\#17177](https://github.com/element-hq/synapse/issues/17177), [\#17184](https://github.com/element-hq/synapse/issues/17184)) + +### Improved Documentation + +- Document [`/v1/make_knock`](https://spec.matrix.org/v1.10/server-server-api/#get_matrixfederationv1make_knockroomiduserid) and [`/v1/send_knock/`](https://spec.matrix.org/v1.10/server-server-api/#put_matrixfederationv1send_knockroomideventid) federation endpoints as worker-compatible. ([\#17058](https://github.com/element-hq/synapse/issues/17058)) +- Update User Admin API with note about prefixing OIDC external_id providers. ([\#17139](https://github.com/element-hq/synapse/issues/17139)) +- Clarify the state of the created room when using the `autocreate_auto_join_room_preset` config option. ([\#17150](https://github.com/element-hq/synapse/issues/17150)) +- Update the Admin FAQ with the current libjemalloc version for latest Debian stable. Additionally update the name of the "push_rules" stream in the Workers documentation. ([\#17171](https://github.com/element-hq/synapse/issues/17171)) + +### Internal Changes + +- Add note to reflect that [MSC3886](https://github.com/matrix-org/matrix-spec-proposals/pull/3886) is closed but will remain supported for some time. ([\#17151](https://github.com/element-hq/synapse/issues/17151)) +- Update dependency PyO3 to 0.21. ([\#17162](https://github.com/element-hq/synapse/issues/17162)) +- Fixes linter errors found in PR #17147. ([\#17166](https://github.com/element-hq/synapse/issues/17166)) +- Bump black from 24.2.0 to 24.4.2. ([\#17170](https://github.com/element-hq/synapse/issues/17170)) +- Cache literal sync filter validation for performance. ([\#17186](https://github.com/element-hq/synapse/issues/17186)) +- Improve performance by fixing a reactor pause. ([\#17192](https://github.com/element-hq/synapse/issues/17192)) +- Route `/make_knock` and `/send_knock` federation APIs to the federation reader worker in Complement test runs. ([\#17195](https://github.com/element-hq/synapse/issues/17195)) +- Prepare sync handler to be able to return different sync responses (`SyncVersion`). ([\#17200](https://github.com/element-hq/synapse/issues/17200)) +- Organize the sync cache key parameter outside of the sync config (separate concerns). ([\#17201](https://github.com/element-hq/synapse/issues/17201)) +- Refactor `SyncResultBuilder` assembly to its own function. ([\#17202](https://github.com/element-hq/synapse/issues/17202)) +- Rename to be obvious: `joined_rooms` -> `joined_room_ids`. 
([\#17203](https://github.com/element-hq/synapse/issues/17203), [\#17208](https://github.com/element-hq/synapse/issues/17208)) +- Add a short pause when rate-limiting a request. ([\#17210](https://github.com/element-hq/synapse/issues/17210)) + + + +### Updates to locked dependencies + +* Bump cryptography from 42.0.5 to 42.0.7. ([\#17180](https://github.com/element-hq/synapse/issues/17180)) +* Bump gitpython from 3.1.41 to 3.1.43. ([\#17181](https://github.com/element-hq/synapse/issues/17181)) +* Bump immutabledict from 4.1.0 to 4.2.0. ([\#17179](https://github.com/element-hq/synapse/issues/17179)) +* Bump sentry-sdk from 1.40.3 to 2.1.1. ([\#17178](https://github.com/element-hq/synapse/issues/17178)) +* Bump serde from 1.0.200 to 1.0.201. ([\#17183](https://github.com/element-hq/synapse/issues/17183)) +* Bump serde_json from 1.0.116 to 1.0.117. ([\#17182](https://github.com/element-hq/synapse/issues/17182)) + Synapse 1.107.0 (2024-05-14) ============================ diff --git a/Cargo.lock b/Cargo.lock index ceda2789e..d6f900013 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4,30 +4,30 @@ version = 3 [[package]] name = "aho-corasick" -version = "1.0.2" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43f6cb1bf222025340178f382c426f13757b2960e89779dfcb319c32542a5a41" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" dependencies = [ "memchr", ] [[package]] name = "anyhow" -version = "1.0.82" +version = "1.0.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f538837af36e6f6a9be0faa67f9a314f8119e4e4b5867c6ab40ed60360142519" +checksum = "25bdb32cbbdce2b519a9cd7df3a678443100e265d5e25ca763b7572a5104f5f3" [[package]] name = "arc-swap" -version = "1.5.1" +version = "1.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "983cd8b9d4b02a6dc6ffa557262eb5858a27a0038ffffe21a0f133eaa819a164" +checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457" [[package]] name = "autocfg" -version = "1.1.0" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" +checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" [[package]] name = "base64" @@ -37,9 +37,9 @@ checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" [[package]] name = "bitflags" -version = "1.3.2" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" +checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" [[package]] name = "blake2" @@ -52,9 +52,9 @@ dependencies = [ [[package]] name = "block-buffer" -version = "0.10.3" +version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69cce20737498f97b993470a6e536b8523f0af7892a4f928cceb1ac5e52ebe7e" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" dependencies = [ "generic-array", ] @@ -115,9 +115,9 @@ checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] name = "generic-array" -version = "0.14.6" +version = "0.14.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" dependencies = [ "typenum", 
"version_check", @@ -125,9 +125,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.14" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94b22e06ecb0110981051723910cbf0b5f5e09a2062dd7663334ee79a9d1286c" +checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" dependencies = [ "cfg-if", "js-sys", @@ -191,15 +191,15 @@ checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" [[package]] name = "indoc" -version = "2.0.4" +version = "2.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e186cfbae8084e513daff4240b4797e342f988cecda4fb6c939150f96315fd8" +checksum = "b248f5224d1d606005e02c97f5aa4e88eeb230488bcc03bc9ca4d7991399f2b5" [[package]] name = "itoa" -version = "1.0.4" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4217ad341ebadf8d8e724e264f13e593e0648f5b3e94b3896a5df283be015ecc" +checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" [[package]] name = "js-sys" @@ -218,15 +218,15 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "libc" -version = "0.2.153" +version = "0.2.154" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" +checksum = "ae743338b92ff9146ce83992f766a31066a91a8c84a45e0e9f21e7cf6de6d346" [[package]] name = "lock_api" -version = "0.4.9" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df" +checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" dependencies = [ "autocfg", "scopeguard", @@ -240,15 +240,15 @@ checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c" [[package]] name = "memchr" -version = "2.6.3" +version = "2.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f232d6ef707e1956a43342693d2a31e72989554d58299d7a88738cc95b0d35c" +checksum = "6c8640c5d730cb13ebd907d8d04b52f55ac9a2eec55b440c8892f40d56c76c1d" [[package]] name = "memoffset" -version = "0.9.0" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" +checksum = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a" dependencies = [ "autocfg", ] @@ -261,15 +261,15 @@ checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" [[package]] name = "once_cell" -version = "1.15.0" +version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e82dad04139b71a90c080c8463fe0dc7902db5192d939bd0950f074d014339e1" +checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" [[package]] name = "parking_lot" -version = "0.12.1" +version = "0.12.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" +checksum = "7e4af0ca4f6caed20e900d564c242b8e5d4903fdacf31d3daf527b66fe6f42fb" dependencies = [ "lock_api", "parking_lot_core", @@ -277,15 +277,15 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.3" +version = "0.9.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09a279cbf25cb0757810394fbc1e359949b59e348145c643a939a525692e6929" +checksum = 
"1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" dependencies = [ "cfg-if", "libc", "redox_syscall", "smallvec", - "windows-sys", + "windows-targets", ] [[package]] @@ -302,18 +302,18 @@ checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" [[package]] name = "proc-macro2" -version = "1.0.76" +version = "1.0.82" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95fc56cda0b5c3325f5fbbd7ff9fda9e02bb00bb3dac51252d2f1bfa1cb8cc8c" +checksum = "8ad3d49ab951a01fbaafe34f2ec74122942fe18a3f9814c3268f1bb72042131b" dependencies = [ "unicode-ident", ] [[package]] name = "pyo3" -version = "0.20.3" +version = "0.21.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53bdbb96d49157e65d45cc287af5f32ffadd5f4761438b527b055fb0d4bb8233" +checksum = "a5e00b96a521718e08e03b1a622f01c8a8deb50719335de3f60b3b3950f069d8" dependencies = [ "anyhow", "cfg-if", @@ -330,9 +330,9 @@ dependencies = [ [[package]] name = "pyo3-build-config" -version = "0.20.3" +version = "0.21.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "deaa5745de3f5231ce10517a1f5dd97d53e5a2fd77aa6b5842292085831d48d7" +checksum = "7883df5835fafdad87c0d888b266c8ec0f4c9ca48a5bed6bbb592e8dedee1b50" dependencies = [ "once_cell", "target-lexicon", @@ -340,9 +340,9 @@ dependencies = [ [[package]] name = "pyo3-ffi" -version = "0.20.3" +version = "0.21.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62b42531d03e08d4ef1f6e85a2ed422eb678b8cd62b762e53891c05faf0d4afa" +checksum = "01be5843dc60b916ab4dad1dca6d20b9b4e6ddc8e15f50c47fe6d85f1fb97403" dependencies = [ "libc", "pyo3-build-config", @@ -350,9 +350,9 @@ dependencies = [ [[package]] name = "pyo3-log" -version = "0.9.0" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c10808ee7250403bedb24bc30c32493e93875fef7ba3e4292226fe924f398bd" +checksum = "2af49834b8d2ecd555177e63b273b708dea75150abc6f5341d0a6e1a9623976c" dependencies = [ "arc-swap", "log", @@ -361,9 +361,9 @@ dependencies = [ [[package]] name = "pyo3-macros" -version = "0.20.3" +version = "0.21.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7305c720fa01b8055ec95e484a6eca7a83c841267f0dd5280f0c8b8551d2c158" +checksum = "77b34069fc0682e11b31dbd10321cbf94808394c56fd996796ce45217dfac53c" dependencies = [ "proc-macro2", "pyo3-macros-backend", @@ -373,9 +373,9 @@ dependencies = [ [[package]] name = "pyo3-macros-backend" -version = "0.20.3" +version = "0.21.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c7e9b68bb9c3149c5b0cade5d07f953d6d125eb4337723c4ccdb665f1f96185" +checksum = "08260721f32db5e1a5beae69a55553f56b99bd0e1c3e6e0a5e8851a9d0f5a85c" dependencies = [ "heck", "proc-macro2", @@ -386,9 +386,9 @@ dependencies = [ [[package]] name = "pythonize" -version = "0.20.0" +version = "0.21.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffd1c3ef39c725d63db5f9bc455461bafd80540cb7824c61afb823501921a850" +checksum = "9d0664248812c38cc55a4ed07f88e4df516ce82604b93b1ffdc041aa77a6cb3c" dependencies = [ "pyo3", "serde", @@ -396,9 +396,9 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.35" +version = "1.0.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" +checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" dependencies = [ 
"proc-macro2", ] @@ -435,9 +435,9 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.2.16" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" +checksum = "469052894dcb553421e483e4209ee581a45100d31b4018de03e5a7ad86374a7e" dependencies = [ "bitflags", ] @@ -456,9 +456,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.4" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b7fa1134405e2ec9353fd416b17f8dacd46c473d7d3fd1cf202706a14eb792a" +checksum = "86b83b8b9847f9bf95ef68afb0b8e6cdb80f498442f5179a29fad448fcc1eaea" dependencies = [ "aho-corasick", "memchr", @@ -467,36 +467,36 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.8.2" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" +checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56" [[package]] name = "ryu" -version = "1.0.11" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4501abdff3ae82a1c1b477a17252eb69cee9e66eb915c1abaa4f44d873df9f09" +checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" [[package]] name = "scopeguard" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "serde" -version = "1.0.200" +version = "1.0.201" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ddc6f9cc94d67c0e21aaf7eda3a010fd3af78ebf6e096aa6e2e13c79749cce4f" +checksum = "780f1cebed1629e4753a1a38a3c72d30b97ec044f0aef68cb26650a3c5cf363c" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.200" +version = "1.0.201" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "856f046b9400cee3c8c94ed572ecdb752444c24528c035cd35882aad6f492bcb" +checksum = "c5e405930b9796f1c00bee880d03fc7e0bb4b9a11afc776885ffe84320da2865" dependencies = [ "proc-macro2", "quote", @@ -505,9 +505,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.116" +version = "1.0.117" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e17db7126d17feb94eb3fad46bf1a96b034e8aacbc2e775fe81505f8b0b2813" +checksum = "455182ea6142b14f93f4bc5320a2b31c1f266b66a4a5c858b013302a5d8cbfc3" dependencies = [ "itoa", "ryu", @@ -516,9 +516,9 @@ dependencies = [ [[package]] name = "sha1" -version = "0.10.5" +version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" dependencies = [ "cfg-if", "cpufeatures", @@ -538,21 +538,21 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.10.0" +version = "1.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" +checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" [[package]] name = "subtle" -version = "2.4.1" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" +checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" [[package]] name = "syn" -version = "2.0.48" +version = "2.0.61" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f" +checksum = "c993ed8ccba56ae856363b1845da7266a7cb78e1d146c8a32d54b45a8b831fc9" dependencies = [ "proc-macro2", "quote", @@ -585,15 +585,15 @@ dependencies = [ [[package]] name = "target-lexicon" -version = "0.12.4" +version = "0.12.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c02424087780c9b71cc96799eaeddff35af2bc513278cda5c99fc1f5d026d3c1" +checksum = "e1fc403891a21bcfb7c37834ba66a547a8f402146eba7265b5a6d88059c9ff2f" [[package]] name = "typenum" -version = "1.15.0" +version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987" +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] name = "ulid" @@ -608,9 +608,9 @@ dependencies = [ [[package]] name = "unicode-ident" -version = "1.0.5" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" [[package]] name = "unindent" @@ -695,44 +695,65 @@ dependencies = [ ] [[package]] -name = "windows-sys" -version = "0.36.1" +name = "windows-targets" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea04155a16a59f9eab786fe12a4a450e75cdb175f9e0d80da1e17db09f55b8d2" +checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb" dependencies = [ + "windows_aarch64_gnullvm", "windows_aarch64_msvc", "windows_i686_gnu", + "windows_i686_gnullvm", "windows_i686_msvc", "windows_x86_64_gnu", + "windows_x86_64_gnullvm", "windows_x86_64_msvc", ] [[package]] -name = "windows_aarch64_msvc" -version = "0.36.1" +name = "windows_aarch64_gnullvm" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9bb8c3fd39ade2d67e9874ac4f3db21f0d710bee00fe7cab16949ec184eeaa47" +checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6" [[package]] name = "windows_i686_gnu" -version = "0.36.1" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "180e6ccf01daf4c426b846dfc66db1fc518f074baa793aa7d9b9aaeffad6a3b6" +checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9" [[package]] name = "windows_i686_msvc" -version = "0.36.1" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2e7917148b2812d1eeafaeb22a97e4813dfa60a3f8f78ebe204bcc88f12f024" +checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf" [[package]] name = "windows_x86_64_gnu" -version = "0.36.1" +version = "0.52.5" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "4dcd171b8776c41b97521e5da127a2d86ad280114807d0b2ab1e462bc764d9e1" +checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596" [[package]] name = "windows_x86_64_msvc" -version = "0.36.1" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c811ca4a8c853ef420abd8592ba53ddbbac90410fab6903b3e79972a631f7680" +checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0" diff --git a/debian/changelog b/debian/changelog index d228c1cc8..a9a5011f7 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.108.0~rc1) stable; urgency=medium + + * New Synapse release 1.108.0rc1. + + -- Synapse Packaging team Tue, 21 May 2024 10:54:13 +0100 + matrix-synapse-py3 (1.107.0) stable; urgency=medium * New Synapse release 1.107.0. diff --git a/docker/configure_workers_and_start.py b/docker/configure_workers_and_start.py index 77534a4f4..063f3727f 100755 --- a/docker/configure_workers_and_start.py +++ b/docker/configure_workers_and_start.py @@ -211,6 +211,8 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = { "^/_matrix/federation/(v1|v2)/make_leave/", "^/_matrix/federation/(v1|v2)/send_join/", "^/_matrix/federation/(v1|v2)/send_leave/", + "^/_matrix/federation/v1/make_knock/", + "^/_matrix/federation/v1/send_knock/", "^/_matrix/federation/(v1|v2)/invite/", "^/_matrix/federation/(v1|v2)/query_auth/", "^/_matrix/federation/(v1|v2)/event_auth/", diff --git a/docs/admin_api/user_admin_api.md b/docs/admin_api/user_admin_api.md index 9736fe302..228138583 100644 --- a/docs/admin_api/user_admin_api.md +++ b/docs/admin_api/user_admin_api.md @@ -141,8 +141,8 @@ Body parameters: provider for SSO (Single sign-on). More details are in the configuration manual under the sections [sso](../usage/configuration/config_documentation.md#sso) and [oidc_providers](../usage/configuration/config_documentation.md#oidc_providers). - `auth_provider` - **string**, required. The unique, internal ID of the external identity provider. - The same as `idp_id` from the homeserver configuration. Note that no error is raised if the - provided value is not in the homeserver configuration. + The same as `idp_id` from the homeserver configuration. If using OIDC, this value should be prefixed + with `oidc-`. Note that no error is raised if the provided value is not in the homeserver configuration. - `external_id` - **string**, required. An identifier for the user in the external identity provider. When the user logs in to the identity provider, this must be the unique ID that they map to. - `admin` - **bool**, optional, defaults to `false`. Whether the user is a homeserver administrator, diff --git a/docs/usage/administration/admin_faq.md b/docs/usage/administration/admin_faq.md index 0d98f73fb..a1184d037 100644 --- a/docs/usage/administration/admin_faq.md +++ b/docs/usage/administration/admin_faq.md @@ -250,10 +250,10 @@ Using [libjemalloc](https://jemalloc.net) can also yield a significant improvement in overall memory use, and especially in terms of giving back RAM to the OS. To use it, the library must simply be put in the LD_PRELOAD environment variable when launching Synapse. 
On Debian, this
-can be done by installing the `libjemalloc1` package and adding this
+can be done by installing the `libjemalloc2` package and adding this
line to `/etc/default/matrix-synapse`:

-    LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libjemalloc.so.1
+    LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libjemalloc.so.2

This made a significant difference on Python 2.7 - it's unclear how much of an
improvement it provides on Python 3.x.
diff --git a/docs/usage/configuration/config_documentation.md b/docs/usage/configuration/config_documentation.md
index 0c582d038..e04fdfdfb 100644
--- a/docs/usage/configuration/config_documentation.md
+++ b/docs/usage/configuration/config_documentation.md
@@ -1232,6 +1232,31 @@ federation_domain_whitelist:
  - syd.example.com
```
---
+### `federation_whitelist_endpoint_enabled`
+
+Enables an endpoint for fetching the federation whitelist config.
+
+The request method and path are `GET /_synapse/client/v1/config/federation_whitelist`, and the
+response format is:
+
+```json
+{
+  "whitelist_enabled": true, // Whether the federation whitelist is being enforced
+  "whitelist": [ // Which server names are allowed by the whitelist
+    "example.com"
+  ]
+}
+```
+
+If `whitelist_enabled` is `false` then the server is permitted to federate with all others.
+
+The endpoint requires authentication.
+
+Example configuration:
+```yaml
+federation_whitelist_endpoint_enabled: true
+```
+---
### `federation_metrics_domains`

Report prometheus metrics on the age of PDUs being sent to and received from
@@ -2591,6 +2616,11 @@ Possible values for this option are:
* "trusted_private_chat": an invitation is required to join this room and the invitee is
  assigned a power level of 100 upon joining the room.

+Each preset will set up a room in the same manner as if it were provided as the `preset` parameter when
+calling the
+[`POST /_matrix/client/v3/createRoom`](https://spec.matrix.org/latest/client-server-api/#post_matrixclientv3createroom)
+Client-Server API endpoint.
+
If a value of "private_chat" or "trusted_private_chat" is used then
`auto_join_mxid_localpart` must also be configured.

@@ -3528,6 +3558,15 @@ Has the following sub-options:
  users. This allows the CAS SSO flow to be limited to sign in only, rather than
  automatically registering users that have a valid SSO login but do not have
  a pre-registered account. Defaults to true.
+* `allow_numeric_ids`: set to `true` to allow numeric user IDs (default `false`).
+  This allows the CAS SSO flow to provide user IDs composed of numbers only.
+  These identifiers will be prefixed with the letter "u" by default.
+  The prefix can be configured using the "numeric_ids_prefix" option.
+  Choose the prefix carefully to avoid any possible conflicts
+  (e.g. user 1234 would become u1234, which clashes if a user u1234 already exists).
+* `numeric_ids_prefix`: the prefix to add in front of a numeric user ID
+  when the "allow_numeric_ids" option is set to `true`.
+  By default, the prefix is the letter "u" and only alphanumeric characters are allowed.

*Added in Synapse 1.93.0.*

@@ -3542,6 +3581,8 @@ cas_config:
    userGroup: "staff"
    department: None
  enable_registration: true
+  allow_numeric_ids: true
+  numeric_ids_prefix: "numericuser"
```
---
### `sso`
diff --git a/docs/workers.md b/docs/workers.md
index 82f4bfc1d..1f6bfd9e7 100644
--- a/docs/workers.md
+++ b/docs/workers.md
@@ -211,6 +211,8 @@ information.
^/_matrix/federation/v1/make_leave/ ^/_matrix/federation/(v1|v2)/send_join/ ^/_matrix/federation/(v1|v2)/send_leave/ + ^/_matrix/federation/v1/make_knock/ + ^/_matrix/federation/v1/send_knock/ ^/_matrix/federation/(v1|v2)/invite/ ^/_matrix/federation/v1/event_auth/ ^/_matrix/federation/v1/timestamp_to_event/ @@ -535,7 +537,7 @@ the stream writer for the `presence` stream: ##### The `push_rules` stream The following endpoints should be routed directly to the worker configured as -the stream writer for the `push` stream: +the stream writer for the `push_rules` stream: ^/_matrix/client/(api/v1|r0|v3|unstable)/pushrules/ diff --git a/poetry.lock b/poetry.lock index 502f45f8b..8537f3752 100644 --- a/poetry.lock +++ b/poetry.lock @@ -107,33 +107,33 @@ typecheck = ["mypy"] [[package]] name = "black" -version = "24.2.0" +version = "24.4.2" description = "The uncompromising code formatter." optional = false python-versions = ">=3.8" files = [ - {file = "black-24.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6981eae48b3b33399c8757036c7f5d48a535b962a7c2310d19361edeef64ce29"}, - {file = "black-24.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d533d5e3259720fdbc1b37444491b024003e012c5173f7d06825a77508085430"}, - {file = "black-24.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61a0391772490ddfb8a693c067df1ef5227257e72b0e4108482b8d41b5aee13f"}, - {file = "black-24.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:992e451b04667116680cb88f63449267c13e1ad134f30087dec8527242e9862a"}, - {file = "black-24.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:163baf4ef40e6897a2a9b83890e59141cc8c2a98f2dda5080dc15c00ee1e62cd"}, - {file = "black-24.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e37c99f89929af50ffaf912454b3e3b47fd64109659026b678c091a4cd450fb2"}, - {file = "black-24.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9de21bafcba9683853f6c96c2d515e364aee631b178eaa5145fc1c61a3cc92"}, - {file = "black-24.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:9db528bccb9e8e20c08e716b3b09c6bdd64da0dd129b11e160bf082d4642ac23"}, - {file = "black-24.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d84f29eb3ee44859052073b7636533ec995bd0f64e2fb43aeceefc70090e752b"}, - {file = "black-24.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e08fb9a15c914b81dd734ddd7fb10513016e5ce7e6704bdd5e1251ceee51ac9"}, - {file = "black-24.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:810d445ae6069ce64030c78ff6127cd9cd178a9ac3361435708b907d8a04c693"}, - {file = "black-24.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:ba15742a13de85e9b8f3239c8f807723991fbfae24bad92d34a2b12e81904982"}, - {file = "black-24.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7e53a8c630f71db01b28cd9602a1ada68c937cbf2c333e6ed041390d6968faf4"}, - {file = "black-24.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:93601c2deb321b4bad8f95df408e3fb3943d85012dddb6121336b8e24a0d1218"}, - {file = "black-24.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0057f800de6acc4407fe75bb147b0c2b5cbb7c3ed110d3e5999cd01184d53b0"}, - {file = "black-24.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:faf2ee02e6612577ba0181f4347bcbcf591eb122f7841ae5ba233d12c39dcb4d"}, - {file = "black-24.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:057c3dc602eaa6fdc451069bd027a1b2635028b575a6c3acfd63193ced20d9c8"}, - {file = "black-24.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:08654d0797e65f2423f850fc8e16a0ce50925f9337fb4a4a176a7aa4026e63f8"}, - {file = "black-24.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca610d29415ee1a30a3f30fab7a8f4144e9d34c89a235d81292a1edb2b55f540"}, - {file = "black-24.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:4dd76e9468d5536abd40ffbc7a247f83b2324f0c050556d9c371c2b9a9a95e31"}, - {file = "black-24.2.0-py3-none-any.whl", hash = "sha256:e8a6ae970537e67830776488bca52000eaa37fa63b9988e8c487458d9cd5ace6"}, - {file = "black-24.2.0.tar.gz", hash = "sha256:bce4f25c27c3435e4dace4815bcb2008b87e167e3bf4ee47ccdc5ce906eb4894"}, + {file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"}, + {file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"}, + {file = "black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"}, + {file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"}, + {file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"}, + {file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"}, + {file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"}, + {file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"}, + {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"}, + {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"}, + {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"}, + {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"}, + {file = "black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"}, + {file = "black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"}, + {file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"}, + {file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"}, + {file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"}, + {file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"}, + {file = "black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"}, + {file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"}, + 
{file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"}, + {file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"}, ] [package.dependencies] @@ -403,43 +403,43 @@ files = [ [[package]] name = "cryptography" -version = "42.0.5" +version = "42.0.7" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"}, - {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"}, - {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"}, - {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"}, - {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"}, - {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"}, - {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"}, - {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"}, - {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"}, - {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"}, - {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"}, - {file = 
"cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"}, - {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"}, - {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"}, - {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = "sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"}, - {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"}, - {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"}, - {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"}, - {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"}, - {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"}, + {file = "cryptography-42.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a987f840718078212fdf4504d0fd4c6effe34a7e4740378e59d47696e8dfb477"}, + {file = "cryptography-42.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:bd13b5e9b543532453de08bcdc3cc7cebec6f9883e886fd20a92f26940fd3e7a"}, + {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a79165431551042cc9d1d90e6145d5d0d3ab0f2d66326c201d9b0e7f5bf43604"}, + {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a47787a5e3649008a1102d3df55424e86606c9bae6fb77ac59afe06d234605f8"}, + {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:02c0eee2d7133bdbbc5e24441258d5d2244beb31da5ed19fbb80315f4bbbff55"}, + {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:5e44507bf8d14b36b8389b226665d597bc0f18ea035d75b4e53c7b1ea84583cc"}, + {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:7f8b25fa616d8b846aef64b15c606bb0828dbc35faf90566eb139aa9cff67af2"}, + {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:93a3209f6bb2b33e725ed08ee0991b92976dfdcf4e8b38646540674fc7508e13"}, + {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e6b8f1881dac458c34778d0a424ae5769de30544fc678eac51c1c8bb2183e9da"}, + {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3de9a45d3b2b7d8088c3fbf1ed4395dfeff79d07842217b38df14ef09ce1d8d7"}, + {file = 
"cryptography-42.0.7-cp37-abi3-win32.whl", hash = "sha256:789caea816c6704f63f6241a519bfa347f72fbd67ba28d04636b7c6b7da94b0b"}, + {file = "cryptography-42.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:8cb8ce7c3347fcf9446f201dc30e2d5a3c898d009126010cbd1f443f28b52678"}, + {file = "cryptography-42.0.7-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:a3a5ac8b56fe37f3125e5b72b61dcde43283e5370827f5233893d461b7360cd4"}, + {file = "cryptography-42.0.7-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:779245e13b9a6638df14641d029add5dc17edbef6ec915688f3acb9e720a5858"}, + {file = "cryptography-42.0.7-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d563795db98b4cd57742a78a288cdbdc9daedac29f2239793071fe114f13785"}, + {file = "cryptography-42.0.7-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:31adb7d06fe4383226c3e963471f6837742889b3c4caa55aac20ad951bc8ffda"}, + {file = "cryptography-42.0.7-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:efd0bf5205240182e0f13bcaea41be4fdf5c22c5129fc7ced4a0282ac86998c9"}, + {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a9bc127cdc4ecf87a5ea22a2556cab6c7eda2923f84e4f3cc588e8470ce4e42e"}, + {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:3577d029bc3f4827dd5bf8bf7710cac13527b470bbf1820a3f394adb38ed7d5f"}, + {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2e47577f9b18723fa294b0ea9a17d5e53a227867a0a4904a1a076d1646d45ca1"}, + {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1a58839984d9cb34c855197043eaae2c187d930ca6d644612843b4fe8513c886"}, + {file = "cryptography-42.0.7-cp39-abi3-win32.whl", hash = "sha256:e6b79d0adb01aae87e8a44c2b64bc3f3fe59515280e00fb6d57a7267a2583cda"}, + {file = "cryptography-42.0.7-cp39-abi3-win_amd64.whl", hash = "sha256:16268d46086bb8ad5bf0a2b5544d8a9ed87a0e33f5e77dd3c3301e63d941a83b"}, + {file = "cryptography-42.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2954fccea107026512b15afb4aa664a5640cd0af630e2ee3962f2602693f0c82"}, + {file = "cryptography-42.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:362e7197754c231797ec45ee081f3088a27a47c6c01eff2ac83f60f85a50fe60"}, + {file = "cryptography-42.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4f698edacf9c9e0371112792558d2f705b5645076cc0aaae02f816a0171770fd"}, + {file = "cryptography-42.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5482e789294854c28237bba77c4c83be698be740e31a3ae5e879ee5444166582"}, + {file = "cryptography-42.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e9b2a6309f14c0497f348d08a065d52f3020656f675819fc405fb63bbcd26562"}, + {file = "cryptography-42.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d8e3098721b84392ee45af2dd554c947c32cc52f862b6a3ae982dbb90f577f14"}, + {file = "cryptography-42.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c65f96dad14f8528a447414125e1fc8feb2ad5a272b8f68477abbcc1ea7d94b9"}, + {file = "cryptography-42.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:36017400817987670037fbb0324d71489b6ead6231c9604f8fc1f7d008087c68"}, + {file = "cryptography-42.0.7.tar.gz", hash = "sha256:ecbfbc00bf55888edda9868a4cf927205de8499e7fabe6c050322298382953f2"}, ] [package.dependencies] @@ -524,20 +524,21 @@ smmap = ">=3.0.1,<6" [[package]] name = "gitpython" -version = "3.1.41" +version = "3.1.43" description = "GitPython is a Python library used to interact with Git 
repositories" optional = false python-versions = ">=3.7" files = [ - {file = "GitPython-3.1.41-py3-none-any.whl", hash = "sha256:c36b6634d069b3f719610175020a9aed919421c87552185b085e04fbbdb10b7c"}, - {file = "GitPython-3.1.41.tar.gz", hash = "sha256:ed66e624884f76df22c8e16066d567aaa5a37d5b5fa19db2c6df6f7156db9048"}, + {file = "GitPython-3.1.43-py3-none-any.whl", hash = "sha256:eec7ec56b92aad751f9912a73404bc02ba212a23adb2c7098ee668417051a1ff"}, + {file = "GitPython-3.1.43.tar.gz", hash = "sha256:35f314a9f878467f5453cc1fee295c3e18e52f1b99f10f6cf5b1682e968a9e7c"}, ] [package.dependencies] gitdb = ">=4.0.1,<5" [package.extras] -test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "sumtypes"] +doc = ["sphinx (==4.3.2)", "sphinx-autodoc-typehints", "sphinx-rtd-theme", "sphinxcontrib-applehelp (>=1.0.2,<=1.0.4)", "sphinxcontrib-devhelp (==1.0.2)", "sphinxcontrib-htmlhelp (>=2.0.0,<=2.0.1)", "sphinxcontrib-qthelp (==1.0.3)", "sphinxcontrib-serializinghtml (==1.1.5)"] +test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "typing-extensions"] [[package]] name = "hiredis" @@ -782,13 +783,13 @@ files = [ [[package]] name = "immutabledict" -version = "4.1.0" +version = "4.2.0" description = "Immutable wrapper around dictionaries (a fork of frozendict)" optional = false python-versions = ">=3.8,<4.0" files = [ - {file = "immutabledict-4.1.0-py3-none-any.whl", hash = "sha256:c176e99aa90aedb81716ad35218bb2055d049b549626db4523dbe011cf2f32ac"}, - {file = "immutabledict-4.1.0.tar.gz", hash = "sha256:93d100ccd2cd09a1fd3f136b9328c6e59529ba341de8bb499437f6819159fe8a"}, + {file = "immutabledict-4.2.0-py3-none-any.whl", hash = "sha256:d728b2c2410d698d95e6200237feb50a695584d20289ad3379a439aa3d90baba"}, + {file = "immutabledict-4.2.0.tar.gz", hash = "sha256:e003fd81aad2377a5a758bf7e1086cf3b70b63e9a5cc2f46bce8d0a2b4727c5f"}, ] [[package]] @@ -2398,26 +2399,28 @@ doc = ["Sphinx", "sphinx-rtd-theme"] [[package]] name = "sentry-sdk" -version = "1.40.3" +version = "2.1.1" description = "Python client for Sentry (https://sentry.io)" optional = true -python-versions = "*" +python-versions = ">=3.6" files = [ - {file = "sentry-sdk-1.40.3.tar.gz", hash = "sha256:3c2b027979bb400cd65a47970e64f8cef8acda86b288a27f42a98692505086cd"}, - {file = "sentry_sdk-1.40.3-py2.py3-none-any.whl", hash = "sha256:73383f28311ae55602bb6cc3b013830811135ba5521e41333a6e68f269413502"}, + {file = "sentry_sdk-2.1.1-py2.py3-none-any.whl", hash = "sha256:99aeb78fb76771513bd3b2829d12613130152620768d00cd3e45ac00cb17950f"}, + {file = "sentry_sdk-2.1.1.tar.gz", hash = "sha256:95d8c0bb41c8b0bc37ab202c2c4a295bb84398ee05f4cdce55051cd75b926ec1"}, ] [package.dependencies] certifi = "*" -urllib3 = {version = ">=1.26.11", markers = "python_version >= \"3.6\""} +urllib3 = ">=1.26.11" [package.extras] aiohttp = ["aiohttp (>=3.5)"] +anthropic = ["anthropic (>=0.16)"] arq = ["arq (>=0.23)"] asyncpg = ["asyncpg (>=0.23)"] beam = ["apache-beam (>=2.12)"] bottle = ["bottle (>=0.12.13)"] celery = ["celery (>=3)"] +celery-redbeat = ["celery-redbeat (>=2)"] chalice = ["chalice (>=1.16.0)"] clickhouse-driver = ["clickhouse-driver (>=0.2.0)"] django = ["django (>=1.8)"] @@ -2427,7 +2430,10 @@ flask = ["blinker (>=1.1)", "flask (>=0.11)", "markupsafe"] grpcio = ["grpcio (>=1.21.1)"] httpx = ["httpx (>=0.16.0)"] huey = ["huey (>=2)"] 
+huggingface-hub = ["huggingface-hub (>=0.22)"] +langchain = ["langchain (>=0.0.210)"] loguru = ["loguru (>=0.5)"] +openai = ["openai (>=1.0.0)", "tiktoken (>=0.3.0)"] opentelemetry = ["opentelemetry-distro (>=0.35b0)"] opentelemetry-experimental = ["opentelemetry-distro (>=0.40b0,<1.0)", "opentelemetry-instrumentation-aiohttp-client (>=0.40b0,<1.0)", "opentelemetry-instrumentation-django (>=0.40b0,<1.0)", "opentelemetry-instrumentation-fastapi (>=0.40b0,<1.0)", "opentelemetry-instrumentation-flask (>=0.40b0,<1.0)", "opentelemetry-instrumentation-requests (>=0.40b0,<1.0)", "opentelemetry-instrumentation-sqlite3 (>=0.40b0,<1.0)", "opentelemetry-instrumentation-urllib (>=0.40b0,<1.0)"] pure-eval = ["asttokens", "executing", "pure-eval"] diff --git a/pyproject.toml b/pyproject.toml index dd4521ff7..00366ebb6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -96,7 +96,7 @@ module-name = "synapse.synapse_rust" [tool.poetry] name = "matrix-synapse" -version = "1.107.0" +version = "1.108.0rc1" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors "] license = "AGPL-3.0-or-later" diff --git a/rust/Cargo.toml b/rust/Cargo.toml index d41a216d1..026487275 100644 --- a/rust/Cargo.toml +++ b/rust/Cargo.toml @@ -30,14 +30,14 @@ http = "1.1.0" lazy_static = "1.4.0" log = "0.4.17" mime = "0.3.17" -pyo3 = { version = "0.20.0", features = [ +pyo3 = { version = "0.21.0", features = [ "macros", "anyhow", "abi3", "abi3-py38", ] } -pyo3-log = "0.9.0" -pythonize = "0.20.0" +pyo3-log = "0.10.0" +pythonize = "0.21.0" regex = "1.6.0" sha2 = "0.10.8" serde = { version = "1.0.144", features = ["derive"] } diff --git a/rust/src/acl/mod.rs b/rust/src/acl/mod.rs index 286574fb4..982720ba9 100644 --- a/rust/src/acl/mod.rs +++ b/rust/src/acl/mod.rs @@ -25,21 +25,21 @@ use std::net::Ipv4Addr; use std::str::FromStr; use anyhow::Error; -use pyo3::prelude::*; +use pyo3::{prelude::*, pybacked::PyBackedStr}; use regex::Regex; use crate::push::utils::{glob_to_regex, GlobMatchType}; /// Called when registering modules with python. -pub fn register_module(py: Python<'_>, m: &PyModule) -> PyResult<()> { - let child_module = PyModule::new(py, "acl")?; +pub fn register_module(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()> { + let child_module = PyModule::new_bound(py, "acl")?; child_module.add_class::()?; - m.add_submodule(child_module)?; + m.add_submodule(&child_module)?; // We need to manually add the module to sys.modules to make `from // synapse.synapse_rust import acl` work. - py.import("sys")? + py.import_bound("sys")? .getattr("modules")? .set_item("synapse.synapse_rust.acl", child_module)?; @@ -59,8 +59,8 @@ impl ServerAclEvaluator { #[new] pub fn py_new( allow_ip_literals: bool, - allow: Vec<&str>, - deny: Vec<&str>, + allow: Vec, + deny: Vec, ) -> Result { let allow = allow .iter() diff --git a/rust/src/events/internal_metadata.rs b/rust/src/events/internal_metadata.rs index 53c7b1ba6..63774fbd5 100644 --- a/rust/src/events/internal_metadata.rs +++ b/rust/src/events/internal_metadata.rs @@ -38,9 +38,10 @@ use anyhow::Context; use log::warn; use pyo3::{ exceptions::PyAttributeError, + pybacked::PyBackedStr, pyclass, pymethods, - types::{PyDict, PyString}, - IntoPy, PyAny, PyObject, PyResult, Python, + types::{PyAnyMethods, PyDict, PyDictMethods, PyString}, + Bound, IntoPy, PyAny, PyObject, PyResult, Python, }; /// Definitions of the various fields of the internal metadata. 
@@ -59,7 +60,7 @@ enum EventInternalMetadataData {

impl EventInternalMetadataData {
    /// Convert the field to its name and python object.
-    fn to_python_pair<'a>(&self, py: Python<'a>) -> (&'a PyString, PyObject) {
+    fn to_python_pair<'a>(&self, py: Python<'a>) -> (&'a Bound<'a, PyString>, PyObject) {
        match self {
            EventInternalMetadataData::OutOfBandMembership(o) => {
                (pyo3::intern!(py, "out_of_band_membership"), o.into_py(py))
@@ -90,10 +91,13 @@ impl EventInternalMetadataData {
    /// Converts from python key/values to the field.
    ///
    /// Returns `None` if the key is a valid but unrecognized string.
-    fn from_python_pair(key: &PyAny, value: &PyAny) -> PyResult<Option<Self>> {
-        let key_str: &str = key.extract()?;
+    fn from_python_pair(
+        key: &Bound<'_, PyAny>,
+        value: &Bound<'_, PyAny>,
+    ) -> PyResult<Option<Self>> {
+        let key_str: PyBackedStr = key.extract()?;

-        let e = match key_str {
+        let e = match &*key_str {
            "out_of_band_membership" => EventInternalMetadataData::OutOfBandMembership(
                value
                    .extract()
@@ -210,11 +214,11 @@ pub struct EventInternalMetadata {
#[pymethods]
impl EventInternalMetadata {
    #[new]
-    fn new(dict: &PyDict) -> PyResult<Self> {
+    fn new(dict: &Bound<'_, PyDict>) -> PyResult<Self> {
        let mut data = Vec::with_capacity(dict.len());

        for (key, value) in dict.iter() {
-            match EventInternalMetadataData::from_python_pair(key, value) {
+            match EventInternalMetadataData::from_python_pair(&key, &value) {
                Ok(Some(entry)) => data.push(entry),
                Ok(None) => {}
                Err(err) => {
@@ -240,7 +244,7 @@ impl EventInternalMetadata {
    ///
    /// Note that `outlier` and `stream_ordering` are stored in separate columns so are not returned here.
    fn get_dict(&self, py: Python<'_>) -> PyResult<PyObject> {
-        let dict = PyDict::new(py);
+        let dict = PyDict::new_bound(py);

        for entry in &self.data {
            let (key, value) = entry.to_python_pair(py);
diff --git a/rust/src/events/mod.rs b/rust/src/events/mod.rs
index ee857b3d7..a4ade1a17 100644
--- a/rust/src/events/mod.rs
+++ b/rust/src/events/mod.rs
@@ -20,20 +20,23 @@

//! Classes for representing Events.

-use pyo3::{types::PyModule, PyResult, Python};
+use pyo3::{
+    types::{PyAnyMethods, PyModule, PyModuleMethods},
+    Bound, PyResult, Python,
+};

mod internal_metadata;

/// Called when registering modules with python.
-pub fn register_module(py: Python<'_>, m: &PyModule) -> PyResult<()> {
-    let child_module = PyModule::new(py, "events")?;
+pub fn register_module(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()> {
+    let child_module = PyModule::new_bound(py, "events")?;
    child_module.add_class::<internal_metadata::EventInternalMetadata>()?;

-    m.add_submodule(child_module)?;
+    m.add_submodule(&child_module)?;

    // We need to manually add the module to sys.modules to make `from
    // synapse.synapse_rust import events` work.
-    py.import("sys")?
+    py.import_bound("sys")?
        .getattr("modules")?
.set_item("synapse.synapse_rust.events", child_module)?; diff --git a/rust/src/http.rs b/rust/src/http.rs index 74098f4c8..af052ab72 100644 --- a/rust/src/http.rs +++ b/rust/src/http.rs @@ -17,8 +17,8 @@ use headers::{Header, HeaderMapExt}; use http::{HeaderName, HeaderValue, Method, Request, Response, StatusCode, Uri}; use pyo3::{ exceptions::PyValueError, - types::{PyBytes, PySequence, PyTuple}, - PyAny, PyResult, + types::{PyAnyMethods, PyBytes, PyBytesMethods, PySequence, PyTuple}, + Bound, PyAny, PyResult, }; use crate::errors::SynapseError; @@ -28,10 +28,11 @@ use crate::errors::SynapseError; /// # Errors /// /// Returns an error if calling the `read` on the Python object failed -fn read_io_body(body: &PyAny, chunk_size: usize) -> PyResult { +fn read_io_body(body: &Bound<'_, PyAny>, chunk_size: usize) -> PyResult { let mut buf = BytesMut::new(); loop { - let bytes: &PyBytes = body.call_method1("read", (chunk_size,))?.downcast()?; + let bound = &body.call_method1("read", (chunk_size,))?; + let bytes: &Bound<'_, PyBytes> = bound.downcast()?; if bytes.as_bytes().is_empty() { return Ok(buf.into()); } @@ -50,17 +51,19 @@ fn read_io_body(body: &PyAny, chunk_size: usize) -> PyResult { /// # Errors /// /// Returns an error if the Python object doesn't properly implement `IRequest` -pub fn http_request_from_twisted(request: &PyAny) -> PyResult> { +pub fn http_request_from_twisted(request: &Bound<'_, PyAny>) -> PyResult> { let content = request.getattr("content")?; - let body = read_io_body(content, 4096)?; + let body = read_io_body(&content, 4096)?; let mut req = Request::new(body); - let uri: &PyBytes = request.getattr("uri")?.downcast()?; + let bound = &request.getattr("uri")?; + let uri: &Bound<'_, PyBytes> = bound.downcast()?; *req.uri_mut() = Uri::try_from(uri.as_bytes()).map_err(|_| PyValueError::new_err("invalid uri"))?; - let method: &PyBytes = request.getattr("method")?.downcast()?; + let bound = &request.getattr("method")?; + let method: &Bound<'_, PyBytes> = bound.downcast()?; *req.method_mut() = Method::from_bytes(method.as_bytes()) .map_err(|_| PyValueError::new_err("invalid method"))?; @@ -71,14 +74,17 @@ pub fn http_request_from_twisted(request: &PyAny) -> PyResult> { for header in headers_iter { let header = header?; - let header: &PyTuple = header.downcast()?; - let name: &PyBytes = header.get_item(0)?.downcast()?; + let header: &Bound<'_, PyTuple> = header.downcast()?; + let bound = &header.get_item(0)?; + let name: &Bound<'_, PyBytes> = bound.downcast()?; let name = HeaderName::from_bytes(name.as_bytes()) .map_err(|_| PyValueError::new_err("invalid header name"))?; - let values: &PySequence = header.get_item(1)?.downcast()?; + let bound = &header.get_item(1)?; + let values: &Bound<'_, PySequence> = bound.downcast()?; for index in 0..values.len()? 
-            let value: &PyBytes = values.get_item(index)?.downcast()?;
+            let bound = &values.get_item(index)?;
+            let value: &Bound<'_, PyBytes> = bound.downcast()?;
             let value = HeaderValue::from_bytes(value.as_bytes())
                 .map_err(|_| PyValueError::new_err("invalid header value"))?;
             req.headers_mut().append(name.clone(), value);
@@ -100,7 +106,10 @@ pub fn http_request_from_twisted(request: &PyAny) -> PyResult<Request<Bytes>> {
 /// # Errors
 ///
 /// Returns an error if the Python object doesn't properly implement `IRequest`
-pub fn http_response_to_twisted<B>(request: &PyAny, response: Response<B>) -> PyResult<()>
+pub fn http_response_to_twisted<B>(
+    request: &Bound<'_, PyAny>,
+    response: Response<B>,
+) -> PyResult<()>
 where
     B: Buf,
 {
diff --git a/rust/src/lib.rs b/rust/src/lib.rs
index 9bd1f17ad..06477880b 100644
--- a/rust/src/lib.rs
+++ b/rust/src/lib.rs
@@ -38,7 +38,7 @@ fn reset_logging_config() {
 /// The entry point for defining the Python module.
 #[pymodule]
-fn synapse_rust(py: Python<'_>, m: &PyModule) -> PyResult<()> {
+fn synapse_rust(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()> {
     m.add_function(wrap_pyfunction!(sum_as_string, m)?)?;
     m.add_function(wrap_pyfunction!(get_rust_file_digest, m)?)?;
     m.add_function(wrap_pyfunction!(reset_logging_config, m)?)?;
diff --git a/rust/src/push/mod.rs b/rust/src/push/mod.rs
index 7dedbf10b..2a452b69a 100644
--- a/rust/src/push/mod.rs
+++ b/rust/src/push/mod.rs
@@ -66,7 +66,7 @@ use log::warn;
 use pyo3::exceptions::PyTypeError;
 use pyo3::prelude::*;
 use pyo3::types::{PyBool, PyList, PyLong, PyString};
-use pythonize::{depythonize, pythonize};
+use pythonize::{depythonize_bound, pythonize};
 use serde::de::Error as _;
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
@@ -78,19 +78,19 @@ pub mod evaluator;
 pub mod utils;

 /// Called when registering modules with python.
-pub fn register_module(py: Python<'_>, m: &PyModule) -> PyResult<()> {
-    let child_module = PyModule::new(py, "push")?;
+pub fn register_module(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()> {
+    let child_module = PyModule::new_bound(py, "push")?;
     child_module.add_class::<PushRule>()?;
     child_module.add_class::<PushRules>()?;
     child_module.add_class::<FilteredPushRules>()?;
     child_module.add_class::<PushRuleEvaluator>()?;
     child_module.add_function(wrap_pyfunction!(get_base_rule_ids, m)?)?;

-    m.add_submodule(child_module)?;
+    m.add_submodule(&child_module)?;

     // We need to manually add the module to sys.modules to make `from
     // synapse.synapse_rust import push` work.
-    py.import("sys")?
+    py.import_bound("sys")?
         .getattr("modules")?
         .set_item("synapse.synapse_rust.push", child_module)?;

@@ -271,12 +271,12 @@ pub enum SimpleJsonValue {

 impl<'source> FromPyObject<'source> for SimpleJsonValue {
     fn extract(ob: &'source PyAny) -> PyResult<Self> {
-        if let Ok(s) = <PyString as pyo3::PyTryFrom>::try_from(ob) {
+        if let Ok(s) = ob.downcast::<PyString>() {
             Ok(SimpleJsonValue::Str(Cow::Owned(s.to_string())))
         // A bool *is* an int, ensure we try bool first.
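        // (Background, an illustrative aside: pyo3 0.21 deprecates the GIL-ref
        // `PyTryFrom::try_from` in favour of `Bound::downcast`, hence this
        // mechanical rewrite. The ordering matters under both APIs because a
        // Python `bool` is a subclass of `int`, so downcasting `True` to
        // `PyLong` would succeed and mis-type the value.)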
-        } else if let Ok(b) = <PyBool as pyo3::PyTryFrom>::try_from(ob) {
+        } else if let Ok(b) = ob.downcast::<PyBool>() {
             Ok(SimpleJsonValue::Bool(b.extract()?))
-        } else if let Ok(i) = <PyLong as pyo3::PyTryFrom>::try_from(ob) {
+        } else if let Ok(i) = ob.downcast::<PyLong>() {
             Ok(SimpleJsonValue::Int(i.extract()?))
         } else if ob.is_none() {
             Ok(SimpleJsonValue::Null)
@@ -299,7 +299,7 @@ pub enum JsonValue {

 impl<'source> FromPyObject<'source> for JsonValue {
     fn extract(ob: &'source PyAny) -> PyResult<Self> {
-        if let Ok(l) = <PyList as pyo3::PyTryFrom>::try_from(ob) {
+        if let Ok(l) = ob.downcast::<PyList>() {
             match l.iter().map(SimpleJsonValue::extract).collect() {
                 Ok(a) => Ok(JsonValue::Array(a)),
                 Err(e) => Err(PyTypeError::new_err(format!(
@@ -370,8 +370,8 @@ impl IntoPy<PyObject> for Condition {
 }

 impl<'source> FromPyObject<'source> for Condition {
-    fn extract(ob: &'source PyAny) -> PyResult<Self> {
-        Ok(depythonize(ob)?)
+    fn extract_bound(ob: &Bound<'source, PyAny>) -> PyResult<Self> {
+        Ok(depythonize_bound(ob.clone())?)
     }
 }
diff --git a/rust/src/rendezvous/mod.rs b/rust/src/rendezvous/mod.rs
index c0f5d8b60..f69f45490 100644
--- a/rust/src/rendezvous/mod.rs
+++ b/rust/src/rendezvous/mod.rs
@@ -26,8 +26,10 @@ use headers::{
 use http::{header::ETAG, HeaderMap, Response, StatusCode, Uri};
 use mime::Mime;
 use pyo3::{
-    exceptions::PyValueError, pyclass, pymethods, types::PyModule, Py, PyAny, PyObject, PyResult,
-    Python, ToPyObject,
+    exceptions::PyValueError,
+    pyclass, pymethods,
+    types::{PyAnyMethods, PyModule, PyModuleMethods},
+    Bound, Py, PyAny, PyObject, PyResult, Python, ToPyObject,
 };
 use ulid::Ulid;
@@ -109,7 +111,7 @@ impl RendezvousHandler {
     #[pyo3(signature = (homeserver, /, capacity=100, max_content_length=4*1024, eviction_interval=60*1000, ttl=60*1000))]
     fn new(
         py: Python<'_>,
-        homeserver: &PyAny,
+        homeserver: &Bound<'_, PyAny>,
         capacity: usize,
         max_content_length: u64,
         eviction_interval: u64,
@@ -150,7 +152,7 @@ impl RendezvousHandler {
     }

     fn _evict(&mut self, py: Python<'_>) -> PyResult<()> {
-        let clock = self.clock.as_ref(py);
+        let clock = self.clock.bind(py);
         let now: u64 = clock.call_method0("time_msec")?.extract()?;
         let now = SystemTime::UNIX_EPOCH + Duration::from_millis(now);
         self.evict(now);
@@ -158,12 +160,12 @@ impl RendezvousHandler {
         Ok(())
     }

-    fn handle_post(&mut self, py: Python<'_>, twisted_request: &PyAny) -> PyResult<()> {
+    fn handle_post(&mut self, py: Python<'_>, twisted_request: &Bound<'_, PyAny>) -> PyResult<()> {
         let request = http_request_from_twisted(twisted_request)?;

         let content_type = self.check_input_headers(request.headers())?;

-        let clock = self.clock.as_ref(py);
+        let clock = self.clock.bind(py);
         let now: u64 = clock.call_method0("time_msec")?.extract()?;
         let now = SystemTime::UNIX_EPOCH + Duration::from_millis(now);

@@ -197,7 +199,12 @@ impl RendezvousHandler {
         Ok(())
     }

-    fn handle_get(&mut self, py: Python<'_>, twisted_request: &PyAny, id: &str) -> PyResult<()> {
+    fn handle_get(
+        &mut self,
+        py: Python<'_>,
+        twisted_request: &Bound<'_, PyAny>,
+        id: &str,
+    ) -> PyResult<()> {
         let request = http_request_from_twisted(twisted_request)?;

         let if_none_match: Option<IfNoneMatch> = request.headers().typed_get_optional()?;
@@ -233,7 +240,12 @@ impl RendezvousHandler {
         Ok(())
     }

-    fn handle_put(&mut self, py: Python<'_>, twisted_request: &PyAny, id: &str) -> PyResult<()> {
+    fn handle_put(
+        &mut self,
+        py: Python<'_>,
+        twisted_request: &Bound<'_, PyAny>,
+        id: &str,
+    ) -> PyResult<()> {
         let request = http_request_from_twisted(twisted_request)?;

         let content_type = self.check_input_headers(request.headers())?;
@@ -281,7 +293,7 @@ impl RendezvousHandler {
         Ok(())
     }

-    fn handle_delete(&mut self, twisted_request: &PyAny, id: &str) -> PyResult<()> {
+    fn handle_delete(&mut self, twisted_request: &Bound<'_, PyAny>, id: &str) -> PyResult<()> {
         let _request = http_request_from_twisted(twisted_request)?;

         let id: Ulid = id.parse().map_err(|_| NotFoundError::new())?;
@@ -298,16 +310,16 @@ impl RendezvousHandler {
     }
 }

-pub fn register_module(py: Python<'_>, m: &PyModule) -> PyResult<()> {
-    let child_module = PyModule::new(py, "rendezvous")?;
+pub fn register_module(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()> {
+    let child_module = PyModule::new_bound(py, "rendezvous")?;
     child_module.add_class::<RendezvousHandler>()?;

-    m.add_submodule(child_module)?;
+    m.add_submodule(&child_module)?;

     // We need to manually add the module to sys.modules to make `from
     // synapse.synapse_rust import rendezvous` work.
-    py.import("sys")?
+    py.import_bound("sys")?
         .getattr("modules")?
         .set_item("synapse.synapse_rust.rendezvous", child_module)?;
diff --git a/synapse/api/ratelimiting.py b/synapse/api/ratelimiting.py
index a73626bc8..a99a9e09f 100644
--- a/synapse/api/ratelimiting.py
+++ b/synapse/api/ratelimiting.py
@@ -316,6 +316,10 @@ class Ratelimiter:
         )

         if not allowed:
+            # We pause for a bit here to stop clients from "tight-looping" on
+            # retrying their request.
+            await self.clock.sleep(0.5)
+
             raise LimitExceededError(
                 limiter_name=self._limiter_name,
                 retry_after_ms=int(1000 * (time_allowed - time_now_s)),
diff --git a/synapse/config/cas.py b/synapse/config/cas.py
index d23dcf96b..fa59c350c 100644
--- a/synapse/config/cas.py
+++ b/synapse/config/cas.py
@@ -66,6 +66,17 @@ class CasConfig(Config):

             self.cas_enable_registration = cas_config.get("enable_registration", True)

+            self.cas_allow_numeric_ids = cas_config.get("allow_numeric_ids")
+            self.cas_numeric_ids_prefix = cas_config.get("numeric_ids_prefix")
+            if (
+                self.cas_numeric_ids_prefix is not None
+                and self.cas_numeric_ids_prefix.isalnum() is False
+            ):
+                raise ConfigError(
+                    "Only alphanumeric characters are allowed for numeric IDs prefix",
+                    ("cas_config", "numeric_ids_prefix"),
+                )
+
             self.idp_name = cas_config.get("idp_name", "CAS")
             self.idp_icon = cas_config.get("idp_icon")
             self.idp_brand = cas_config.get("idp_brand")
@@ -77,6 +88,8 @@ class CasConfig(Config):
             self.cas_displayname_attribute = None
             self.cas_required_attributes = []
             self.cas_enable_registration = False
+            self.cas_allow_numeric_ids = False
+            self.cas_numeric_ids_prefix = "u"

     # CAS uses a legacy required attributes mapping, not the one provided by
diff --git a/synapse/config/federation.py b/synapse/config/federation.py
index 9032effac..cf29fa256 100644
--- a/synapse/config/federation.py
+++ b/synapse/config/federation.py
@@ -42,6 +42,10 @@ class FederationConfig(Config):
             for domain in federation_domain_whitelist:
                 self.federation_domain_whitelist[domain] = True

+        self.federation_whitelist_endpoint_enabled = config.get(
+            "federation_whitelist_endpoint_enabled", False
+        )
+
         federation_metrics_domains = config.get("federation_metrics_domains") or []
         validate_config(
             _METRICS_FOR_DOMAINS_SCHEMA,
diff --git a/synapse/federation/transport/server/_base.py b/synapse/federation/transport/server/_base.py
index 23d125412..db0f5076a 100644
--- a/synapse/federation/transport/server/_base.py
+++ b/synapse/federation/transport/server/_base.py
@@ -180,7 +180,11 @@ def _parse_auth_header(header_bytes: bytes) -> Tuple[str, str, str, Optional[str
     """
     try:
         header_str = header_bytes.decode("utf-8")
-        params = re.split(" +", header_str)[1].split(",")
+        space_or_tab = "[ \t]"
+        params = re.split(
+            rf"{space_or_tab}*,{space_or_tab}*",
+            re.split(r"^X-Matrix +", header_str, maxsplit=1)[1],
+        )
         param_dict: Dict[str, str] = {
             k.lower(): v for k, v in [param.split("=", maxsplit=1) for param in params]
         }
diff --git a/synapse/handlers/cas.py b/synapse/handlers/cas.py
index 153123ee8..cc3d641b7 100644
--- a/synapse/handlers/cas.py
+++ b/synapse/handlers/cas.py
@@ -78,6 +78,8 @@ class CasHandler:
         self._cas_displayname_attribute = hs.config.cas.cas_displayname_attribute
         self._cas_required_attributes = hs.config.cas.cas_required_attributes
         self._cas_enable_registration = hs.config.cas.cas_enable_registration
+        self._cas_allow_numeric_ids = hs.config.cas.cas_allow_numeric_ids
+        self._cas_numeric_ids_prefix = hs.config.cas.cas_numeric_ids_prefix

         self._http_client = hs.get_proxied_http_client()
@@ -188,6 +190,9 @@ class CasHandler:
         for child in root[0]:
             if child.tag.endswith("user"):
                 user = child.text
+                # If numeric user IDs are allowed and the username is numeric,
+                # add the prefix so Synapse can handle it.
+                if self._cas_allow_numeric_ids and user is not None and user.isdigit():
+                    user = f"{self._cas_numeric_ids_prefix}{user}"
             if child.tag.endswith("attributes"):
                 for attribute in child:
                     # ElementTree library expands the namespace in
diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py
index e51e282a9..6663d4b27 100644
--- a/synapse/handlers/profile.py
+++ b/synapse/handlers/profile.py
@@ -20,7 +20,7 @@
 #
 import logging
 import random
-from typing import TYPE_CHECKING, Optional, Union
+from typing import TYPE_CHECKING, List, Optional, Union

 from synapse.api.errors import (
     AuthError,
@@ -64,8 +64,10 @@ class ProfileHandler:
         self.user_directory_handler = hs.get_user_directory_handler()
         self.request_ratelimiter = hs.get_request_ratelimiter()

-        self.max_avatar_size = hs.config.server.max_avatar_size
-        self.allowed_avatar_mimetypes = hs.config.server.allowed_avatar_mimetypes
+        self.max_avatar_size: Optional[int] = hs.config.server.max_avatar_size
+        self.allowed_avatar_mimetypes: Optional[List[str]] = (
+            hs.config.server.allowed_avatar_mimetypes
+        )

         self._is_mine_server_name = hs.is_mine_server_name
@@ -337,6 +339,12 @@ class ProfileHandler:
                 return False

         if self.max_avatar_size:
+            if media_info.media_length is None:
+                logger.warning(
+                    "Forbidding avatar change to %s: unknown media size",
+                    mxc,
+                )
+                return False
             # Ensure avatar does not exceed max allowed avatar size
             if media_info.media_length > self.max_avatar_size:
                 logger.warning(
diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py
index d4860e183..63664ef9c 100644
--- a/synapse/handlers/sync.py
+++ b/synapse/handlers/sync.py
@@ -20,6 +20,7 @@
 #
 import itertools
 import logging
+from enum import Enum
 from typing import (
     TYPE_CHECKING,
     AbstractSet,
@@ -112,12 +113,28 @@ LAZY_LOADED_MEMBERS_CACHE_MAX_SIZE = 100
 SyncRequestKey = Tuple[Any, ...]

+
+class SyncVersion(Enum):
+    """
+    Enum for specifying the version of sync request. This is used to key which type
+    of sync response we are generating.
+
+    This is different from the `sync_type` you might see used in other code below,
+    which specifies the sub-type of sync request (e.g. initial_sync, full_state_sync,
+    incremental_sync) and is really only relevant for the `/sync` v2 endpoint.
+    """
+
+    # These string values are semantically significant because they are used in the
+    # metrics.
+
+    # Traditional `/sync` endpoint
+    SYNC_V2 = "sync_v2"
+
+
 @attr.s(slots=True, frozen=True, auto_attribs=True)
 class SyncConfig:
     user: UserID
     filter_collection: FilterCollection
     is_guest: bool
-    request_key: SyncRequestKey
     device_id: Optional[str]
@@ -309,6 +326,8 @@ class SyncHandler:
         self,
         requester: Requester,
         sync_config: SyncConfig,
+        sync_version: SyncVersion,
+        request_key: SyncRequestKey,
         since_token: Optional[StreamToken] = None,
         timeout: int = 0,
         full_state: bool = False,
@@ -316,6 +335,17 @@ class SyncHandler:
         """Get the sync for a client if we have new data for it now. Otherwise
         wait for new data to arrive on the server. If the timeout expires, then
         return an empty sync result.
+
+        Args:
+            requester: The user requesting the sync response.
+            sync_config: Config/info necessary to process the sync request.
+            sync_version: Determines what kind of sync response to generate.
+            request_key: The key to use for caching the response.
+            since_token: The point in the stream to sync from.
+            timeout: How long to wait for new data to arrive before giving up.
+            full_state: Whether to return the full state for each room.
+        Returns:
+            When `SyncVersion.SYNC_V2`, returns a full `SyncResult`.
         """
         # If the user is not part of the mau group, then check that limits have
         # not been exceeded (if not part of the group by this point, almost certain
@@ -324,9 +354,10 @@ class SyncHandler:
         await self.auth_blocking.check_auth_blocking(requester=requester)

         res = await self.response_cache.wrap(
-            sync_config.request_key,
+            request_key,
             self._wait_for_sync_for_user,
             sync_config,
+            sync_version,
             since_token,
             timeout,
             full_state,
@@ -338,6 +369,7 @@ class SyncHandler:
     async def _wait_for_sync_for_user(
         self,
         sync_config: SyncConfig,
+        sync_version: SyncVersion,
         since_token: Optional[StreamToken],
         timeout: int,
         full_state: bool,
@@ -363,9 +395,11 @@ class SyncHandler:
         else:
             sync_type = "incremental_sync"

+        sync_label = f"{sync_version}:{sync_type}"
+
         context = current_context()
         if context:
-            context.tag = sync_type
+            context.tag = sync_label

         # if we have a since token, delete any to-device messages before that token
         # (since we now know that the device has received them)
@@ -384,14 +418,16 @@ class SyncHandler:
             # we are going to return immediately, so don't bother calling
             # notifier.wait_for_events.
             result: SyncResult = await self.current_sync_for_user(
-                sync_config, since_token, full_state=full_state
+                sync_config, sync_version, since_token, full_state=full_state
             )
         else:
             # Otherwise, we wait for something to happen and report it to the user.
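            # (Illustrative gloss: `notifier.wait_for_events` blocks until either
            # the timeout elapses or a new event wakes one of this user's streams,
            # then invokes `current_sync_callback` below to recompute the response
            # for the woken token range.)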
             async def current_sync_callback(
                 before_token: StreamToken, after_token: StreamToken
             ) -> SyncResult:
-                return await self.current_sync_for_user(sync_config, since_token)
+                return await self.current_sync_for_user(
+                    sync_config, sync_version, since_token
+                )

             result = await self.notifier.wait_for_events(
                 sync_config.user.to_string(),
@@ -416,13 +452,14 @@ class SyncHandler:
             lazy_loaded = "true"
         else:
             lazy_loaded = "false"
-        non_empty_sync_counter.labels(sync_type, lazy_loaded).inc()
+        non_empty_sync_counter.labels(sync_label, lazy_loaded).inc()

         return result

     async def current_sync_for_user(
         self,
         sync_config: SyncConfig,
+        sync_version: SyncVersion,
         since_token: Optional[StreamToken] = None,
         full_state: bool = False,
     ) -> SyncResult:
@@ -431,12 +468,26 @@

         This is a wrapper around `generate_sync_result` which starts an open tracing
         span to track the sync. See `generate_sync_result` for the next part of your
         indoctrination.
+
+        Args:
+            sync_config: Config/info necessary to process the sync request.
+            sync_version: Determines what kind of sync response to generate.
+            since_token: The point in the stream to sync from.
+            full_state: Whether to return the full state for each room.
+        Returns:
+            When `SyncVersion.SYNC_V2`, returns a full `SyncResult`.
         """
         with start_active_span("sync.current_sync_for_user"):
             log_kv({"since_token": since_token})

-            sync_result = await self.generate_sync_result(
-                sync_config, since_token, full_state
-            )
+            # Go through the `/sync` v2 path
+            if sync_version == SyncVersion.SYNC_V2:
+                sync_result: SyncResult = await self.generate_sync_result(
+                    sync_config, since_token, full_state
+                )
+            else:
+                raise Exception(
+                    f"Unknown sync_version (this is a Synapse problem): {sync_version}"
+                )

             set_tag(SynapseTags.SYNC_RESULT, bool(sync_result))

             return sync_result
@@ -1517,128 +1568,17 @@ class SyncHandler:
             # See https://github.com/matrix-org/matrix-doc/issues/1144
             raise NotImplementedError()

-        # Note: we get the users room list *before* we get the current token, this
-        # avoids checking back in history if rooms are joined after the token is fetched.
-        token_before_rooms = self.event_sources.get_current_token()
-        mutable_joined_room_ids = set(await self.store.get_rooms_for_user(user_id))
-
-        # NB: The now_token gets changed by some of the generate_sync_* methods,
-        # this is due to some of the underlying streams not supporting the ability
-        # to query up to a given point.
-        # Always use the `now_token` in `SyncResultBuilder`
-        now_token = self.event_sources.get_current_token()
-        log_kv({"now_token": now_token})
-
-        # Since we fetched the users room list before the token, there's a small window
-        # during which membership events may have been persisted, so we fetch these now
-        # and modify the joined room list for any changes between the get_rooms_for_user
-        # call and the get_current_token call.
-        membership_change_events = []
-        if since_token:
-            membership_change_events = await self.store.get_membership_changes_for_user(
-                user_id,
-                since_token.room_key,
-                now_token.room_key,
-                self.rooms_to_exclude_globally,
-            )
-
-            mem_last_change_by_room_id: Dict[str, EventBase] = {}
-            for event in membership_change_events:
-                mem_last_change_by_room_id[event.room_id] = event
-
-            # For the latest membership event in each room found, add/remove the room ID
-            # from the joined room list accordingly. In this case we only care if the
-            # latest change is JOIN.
- - for room_id, event in mem_last_change_by_room_id.items(): - assert event.internal_metadata.stream_ordering - if ( - event.internal_metadata.stream_ordering - < token_before_rooms.room_key.stream - ): - continue - - logger.info( - "User membership change between getting rooms and current token: %s %s %s", - user_id, - event.membership, - room_id, - ) - # User joined a room - we have to then check the room state to ensure we - # respect any bans if there's a race between the join and ban events. - if event.membership == Membership.JOIN: - user_ids_in_room = await self.store.get_users_in_room(room_id) - if user_id in user_ids_in_room: - mutable_joined_room_ids.add(room_id) - # The user left the room, or left and was re-invited but not joined yet - else: - mutable_joined_room_ids.discard(room_id) - - # Tweak the set of rooms to return to the client for eager (non-lazy) syncs. - mutable_rooms_to_exclude = set(self.rooms_to_exclude_globally) - if not sync_config.filter_collection.lazy_load_members(): - # Non-lazy syncs should never include partially stated rooms. - # Exclude all partially stated rooms from this sync. - results = await self.store.is_partial_state_room_batched( - mutable_joined_room_ids - ) - mutable_rooms_to_exclude.update( - room_id - for room_id, is_partial_state in results.items() - if is_partial_state - ) - membership_change_events = [ - event - for event in membership_change_events - if not results.get(event.room_id, False) - ] - - # Incremental eager syncs should additionally include rooms that - # - we are joined to - # - are full-stated - # - became fully-stated at some point during the sync period - # (These rooms will have been omitted during a previous eager sync.) - forced_newly_joined_room_ids: Set[str] = set() - if since_token and not sync_config.filter_collection.lazy_load_members(): - un_partial_stated_rooms = ( - await self.store.get_un_partial_stated_rooms_between( - since_token.un_partial_stated_rooms_key, - now_token.un_partial_stated_rooms_key, - mutable_joined_room_ids, - ) - ) - results = await self.store.is_partial_state_room_batched( - un_partial_stated_rooms - ) - forced_newly_joined_room_ids.update( - room_id - for room_id, is_partial_state in results.items() - if not is_partial_state - ) - - # Now we have our list of joined room IDs, exclude as configured and freeze - joined_room_ids = frozenset( - room_id - for room_id in mutable_joined_room_ids - if room_id not in mutable_rooms_to_exclude + sync_result_builder = await self.get_sync_result_builder( + sync_config, + since_token, + full_state, ) logger.debug( "Calculating sync response for %r between %s and %s", sync_config.user, - since_token, - now_token, - ) - - sync_result_builder = SyncResultBuilder( - sync_config, - full_state, - since_token=since_token, - now_token=now_token, - joined_room_ids=joined_room_ids, - excluded_room_ids=frozenset(mutable_rooms_to_exclude), - forced_newly_joined_room_ids=frozenset(forced_newly_joined_room_ids), - membership_change_events=membership_change_events, + sync_result_builder.since_token, + sync_result_builder.now_token, ) logger.debug("Fetching account data") @@ -1750,6 +1690,149 @@ class SyncHandler: next_batch=sync_result_builder.now_token, ) + async def get_sync_result_builder( + self, + sync_config: SyncConfig, + since_token: Optional[StreamToken] = None, + full_state: bool = False, + ) -> "SyncResultBuilder": + """ + Assemble a `SyncResultBuilder` with all of the initial context to + start building up the sync response: + + - Membership changes between 
the last sync and the current sync. + - Joined room IDs (minus any rooms to exclude). + - Rooms that became fully-stated/un-partial stated since the last sync. + + Args: + sync_config: Config/info necessary to process the sync request. + since_token: The point in the stream to sync from. + full_state: Whether to return the full state for each room. + + Returns: + `SyncResultBuilder` ready to start generating parts of the sync response. + """ + user_id = sync_config.user.to_string() + + # Note: we get the users room list *before* we get the current token, this + # avoids checking back in history if rooms are joined after the token is fetched. + token_before_rooms = self.event_sources.get_current_token() + mutable_joined_room_ids = set(await self.store.get_rooms_for_user(user_id)) + + # NB: The `now_token` gets changed by some of the `generate_sync_*` methods, + # this is due to some of the underlying streams not supporting the ability + # to query up to a given point. + # Always use the `now_token` in `SyncResultBuilder` + now_token = self.event_sources.get_current_token() + log_kv({"now_token": now_token}) + + # Since we fetched the users room list before the token, there's a small window + # during which membership events may have been persisted, so we fetch these now + # and modify the joined room list for any changes between the get_rooms_for_user + # call and the get_current_token call. + membership_change_events = [] + if since_token: + membership_change_events = await self.store.get_membership_changes_for_user( + user_id, + since_token.room_key, + now_token.room_key, + self.rooms_to_exclude_globally, + ) + + mem_last_change_by_room_id: Dict[str, EventBase] = {} + for event in membership_change_events: + mem_last_change_by_room_id[event.room_id] = event + + # For the latest membership event in each room found, add/remove the room ID + # from the joined room list accordingly. In this case we only care if the + # latest change is JOIN. + + for room_id, event in mem_last_change_by_room_id.items(): + assert event.internal_metadata.stream_ordering + if ( + event.internal_metadata.stream_ordering + < token_before_rooms.room_key.stream + ): + continue + + logger.info( + "User membership change between getting rooms and current token: %s %s %s", + user_id, + event.membership, + room_id, + ) + # User joined a room - we have to then check the room state to ensure we + # respect any bans if there's a race between the join and ban events. + if event.membership == Membership.JOIN: + user_ids_in_room = await self.store.get_users_in_room(room_id) + if user_id in user_ids_in_room: + mutable_joined_room_ids.add(room_id) + # The user left the room, or left and was re-invited but not joined yet + else: + mutable_joined_room_ids.discard(room_id) + + # Tweak the set of rooms to return to the client for eager (non-lazy) syncs. + mutable_rooms_to_exclude = set(self.rooms_to_exclude_globally) + if not sync_config.filter_collection.lazy_load_members(): + # Non-lazy syncs should never include partially stated rooms. + # Exclude all partially stated rooms from this sync. 
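            # (Illustrative: `is_partial_state_room_batched` returns a mapping of
            # room_id -> bool, so rooms that are still resyncing their full state
            # over federation get dropped from eager syncs here.)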
+ results = await self.store.is_partial_state_room_batched( + mutable_joined_room_ids + ) + mutable_rooms_to_exclude.update( + room_id + for room_id, is_partial_state in results.items() + if is_partial_state + ) + membership_change_events = [ + event + for event in membership_change_events + if not results.get(event.room_id, False) + ] + + # Incremental eager syncs should additionally include rooms that + # - we are joined to + # - are full-stated + # - became fully-stated at some point during the sync period + # (These rooms will have been omitted during a previous eager sync.) + forced_newly_joined_room_ids: Set[str] = set() + if since_token and not sync_config.filter_collection.lazy_load_members(): + un_partial_stated_rooms = ( + await self.store.get_un_partial_stated_rooms_between( + since_token.un_partial_stated_rooms_key, + now_token.un_partial_stated_rooms_key, + mutable_joined_room_ids, + ) + ) + results = await self.store.is_partial_state_room_batched( + un_partial_stated_rooms + ) + forced_newly_joined_room_ids.update( + room_id + for room_id, is_partial_state in results.items() + if not is_partial_state + ) + + # Now we have our list of joined room IDs, exclude as configured and freeze + joined_room_ids = frozenset( + room_id + for room_id in mutable_joined_room_ids + if room_id not in mutable_rooms_to_exclude + ) + + sync_result_builder = SyncResultBuilder( + sync_config, + full_state, + since_token=since_token, + now_token=now_token, + joined_room_ids=joined_room_ids, + excluded_room_ids=frozenset(mutable_rooms_to_exclude), + forced_newly_joined_room_ids=frozenset(forced_newly_joined_room_ids), + membership_change_events=membership_change_events, + ) + + return sync_result_builder + @measure_func("_generate_sync_entry_for_device_list") async def _generate_sync_entry_for_device_list( self, @@ -1798,7 +1881,7 @@ class SyncHandler: users_that_have_changed = set() - joined_rooms = sync_result_builder.joined_room_ids + joined_room_ids = sync_result_builder.joined_room_ids # Step 1a, check for changes in devices of users we share a room # with @@ -1823,7 +1906,7 @@ class SyncHandler: # or if the changed user is the syncing user (as we always # want to include device list updates of their own devices). if user_id == changed_user_id or any( - rid in joined_rooms for rid in entries + rid in joined_room_ids for rid in entries ): users_that_have_changed.add(changed_user_id) else: @@ -1857,7 +1940,7 @@ class SyncHandler: # Remove any users that we still share a room with. 
left_users_rooms = await self.store.get_rooms_for_users(newly_left_users) for user_id, entries in left_users_rooms.items(): - if any(rid in joined_rooms for rid in entries): + if any(rid in joined_room_ids for rid in entries): newly_left_users.discard(user_id) return DeviceListUpdates(changed=users_that_have_changed, left=newly_left_users) @@ -1944,23 +2027,19 @@ class SyncHandler: ) if push_rules_changed: - global_account_data = { - AccountDataTypes.PUSH_RULES: await self._push_rules_handler.push_rules_for_user( - sync_config.user - ), - **global_account_data, - } + global_account_data = dict(global_account_data) + global_account_data[AccountDataTypes.PUSH_RULES] = ( + await self._push_rules_handler.push_rules_for_user(sync_config.user) + ) else: all_global_account_data = await self.store.get_global_account_data_for_user( user_id ) - global_account_data = { - AccountDataTypes.PUSH_RULES: await self._push_rules_handler.push_rules_for_user( - sync_config.user - ), - **all_global_account_data, - } + global_account_data = dict(all_global_account_data) + global_account_data[AccountDataTypes.PUSH_RULES] = ( + await self._push_rules_handler.push_rules_for_user(sync_config.user) + ) account_data_for_user = ( await sync_config.filter_collection.filter_global_account_data( diff --git a/synapse/replication/tcp/client.py b/synapse/replication/tcp/client.py index ba257d34e..5e5387fdc 100644 --- a/synapse/replication/tcp/client.py +++ b/synapse/replication/tcp/client.py @@ -55,6 +55,7 @@ from synapse.replication.tcp.streams.partial_state import ( ) from synapse.types import PersistedEventPosition, ReadReceipt, StreamKeyType, UserID from synapse.util.async_helpers import Linearizer, timeout_deferred +from synapse.util.iterutils import batch_iter from synapse.util.metrics import Measure if TYPE_CHECKING: @@ -150,9 +151,15 @@ class ReplicationDataHandler: if row.entity.startswith("@") and not row.is_signature: room_ids = await self.store.get_rooms_for_user(row.entity) all_room_ids.update(room_ids) - self.notifier.on_new_event( - StreamKeyType.DEVICE_LIST, token, rooms=all_room_ids - ) + + # `all_room_ids` can be large, so let's wake up those streams in batches + for batched_room_ids in batch_iter(all_room_ids, 100): + self.notifier.on_new_event( + StreamKeyType.DEVICE_LIST, token, rooms=batched_room_ids + ) + + # Yield to reactor so that we don't block. + await self._clock.sleep(0) elif stream_name == PushersStream.NAME: for row in rows: if row.deleted: diff --git a/synapse/rest/client/rendezvous.py b/synapse/rest/client/rendezvous.py index 143f05765..27bf53314 100644 --- a/synapse/rest/client/rendezvous.py +++ b/synapse/rest/client/rendezvous.py @@ -34,6 +34,9 @@ if TYPE_CHECKING: logger = logging.getLogger(__name__) +# n.b [MSC3886](https://github.com/matrix-org/matrix-spec-proposals/pull/3886) has now been closed. +# However, we want to keep this implementation around for some time. +# TODO: define an end-of-life date for this implementation. 
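# (A minimal sketch of the `batch_iter` helper that the replication-client
# change above relies on; assumed semantics: chunk an iterable into tuples of
# at most `size` items.
#
#     from itertools import islice
#     def batch_iter(iterable, size):
#         it = iter(iterable)
#         return iter(lambda: tuple(islice(it, size)), ())
#
#     list(batch_iter(range(5), 2))  # -> [(0, 1), (2, 3), (4,)]
# )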
class MSC3886RendezvousServlet(RestServlet): """ This is a placeholder implementation of [MSC3886](https://github.com/matrix-org/matrix-spec-proposals/pull/3886) diff --git a/synapse/rest/client/sync.py b/synapse/rest/client/sync.py index 2b103ca6a..4a57eaf93 100644 --- a/synapse/rest/client/sync.py +++ b/synapse/rest/client/sync.py @@ -40,6 +40,7 @@ from synapse.handlers.sync import ( KnockedSyncResult, SyncConfig, SyncResult, + SyncVersion, ) from synapse.http.server import HttpServer from synapse.http.servlet import RestServlet, parse_boolean, parse_integer, parse_string @@ -47,6 +48,7 @@ from synapse.http.site import SynapseRequest from synapse.logging.opentracing import trace_with_opname from synapse.types import JsonDict, Requester, StreamToken from synapse.util import json_decoder +from synapse.util.caches.lrucache import LruCache from ._base import client_patterns, set_timeline_upper_limit @@ -110,6 +112,11 @@ class SyncRestServlet(RestServlet): self._msc2654_enabled = hs.config.experimental.msc2654_enabled self._msc3773_enabled = hs.config.experimental.msc3773_enabled + self._json_filter_cache: LruCache[str, bool] = LruCache( + max_size=1000, + cache_name="sync_valid_filter", + ) + async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: # This will always be set by the time Twisted calls us. assert request.args is not None @@ -177,7 +184,13 @@ class SyncRestServlet(RestServlet): filter_object = json_decoder.decode(filter_id) except Exception: raise SynapseError(400, "Invalid filter JSON", errcode=Codes.NOT_JSON) - self.filtering.check_valid_filter(filter_object) + + # We cache the validation, as this can get quite expensive if people use + # a literal json blob as a query param. + if not self._json_filter_cache.get(filter_id): + self.filtering.check_valid_filter(filter_object) + self._json_filter_cache[filter_id] = True + set_timeline_upper_limit( filter_object, self.hs.config.server.filter_timeline_limit ) @@ -197,7 +210,6 @@ class SyncRestServlet(RestServlet): user=user, filter_collection=filter_collection, is_guest=requester.is_guest, - request_key=request_key, device_id=device_id, ) @@ -220,6 +232,8 @@ class SyncRestServlet(RestServlet): sync_result = await self.sync_handler.wait_for_sync_for_user( requester, sync_config, + SyncVersion.SYNC_V2, + request_key, since_token=since_token, timeout=timeout, full_state=full_state, diff --git a/synapse/rest/synapse/client/__init__.py b/synapse/rest/synapse/client/__init__.py index ba6576d4d..7b5bfc042 100644 --- a/synapse/rest/synapse/client/__init__.py +++ b/synapse/rest/synapse/client/__init__.py @@ -23,6 +23,7 @@ from typing import TYPE_CHECKING, Mapping from twisted.web.resource import Resource +from synapse.rest.synapse.client.federation_whitelist import FederationWhitelistResource from synapse.rest.synapse.client.new_user_consent import NewUserConsentResource from synapse.rest.synapse.client.pick_idp import PickIdpResource from synapse.rest.synapse.client.pick_username import pick_username_resource @@ -77,6 +78,9 @@ def build_synapse_client_resource_tree(hs: "HomeServer") -> Mapping[str, Resourc # To be removed in Synapse v1.32.0. 
resources["/_matrix/saml2"] = res + if hs.config.federation.federation_whitelist_endpoint_enabled: + resources[FederationWhitelistResource.PATH] = FederationWhitelistResource(hs) + if hs.config.experimental.msc4108_enabled: resources["/_synapse/client/rendezvous"] = MSC4108RendezvousSessionResource(hs) diff --git a/synapse/rest/synapse/client/federation_whitelist.py b/synapse/rest/synapse/client/federation_whitelist.py new file mode 100644 index 000000000..2b8f0320e --- /dev/null +++ b/synapse/rest/synapse/client/federation_whitelist.py @@ -0,0 +1,66 @@ +# +# This file is licensed under the Affero General Public License (AGPL) version 3. +# +# Copyright (C) 2024 New Vector, Ltd +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# See the GNU Affero General Public License for more details: +# . +# + +import logging +from typing import TYPE_CHECKING, Tuple + +from synapse.http.server import DirectServeJsonResource +from synapse.http.site import SynapseRequest +from synapse.types import JsonDict + +if TYPE_CHECKING: + from synapse.server import HomeServer + +logger = logging.getLogger(__name__) + + +class FederationWhitelistResource(DirectServeJsonResource): + """Custom endpoint (disabled by default) to fetch the federation whitelist + config. + + Only enabled if `federation_whitelist_endpoint_enabled` feature is enabled. + + Response format: + + { + "whitelist_enabled": true, // Whether the federation whitelist is being enforced + "whitelist": [ // Which server names are allowed by the whitelist + "example.com" + ] + } + """ + + PATH = "/_synapse/client/v1/config/federation_whitelist" + + def __init__(self, hs: "HomeServer"): + super().__init__() + + self._federation_whitelist = hs.config.federation.federation_domain_whitelist + + self._auth = hs.get_auth() + + async def _async_render_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + await self._auth.get_user_by_req(request) + + whitelist = [] + if self._federation_whitelist: + # federation_whitelist is actually a dict, not a list + whitelist = list(self._federation_whitelist) + + return_dict: JsonDict = { + "whitelist_enabled": self._federation_whitelist is not None, + "whitelist": whitelist, + } + + return 200, return_dict diff --git a/synapse/storage/databases/main/room.py b/synapse/storage/databases/main/room.py index 81c7bf371..820510954 100644 --- a/synapse/storage/databases/main/room.py +++ b/synapse/storage/databases/main/room.py @@ -21,13 +21,11 @@ # import logging -from abc import abstractmethod from enum import Enum from typing import ( TYPE_CHECKING, AbstractSet, Any, - Awaitable, Collection, Dict, List, @@ -53,7 +51,7 @@ from synapse.api.room_versions import RoomVersion, RoomVersions from synapse.config.homeserver import HomeServerConfig from synapse.events import EventBase from synapse.replication.tcp.streams.partial_state import UnPartialStatedRoomStream -from synapse.storage._base import SQLBaseStore, db_to_json, make_in_list_sql_clause +from synapse.storage._base import db_to_json, make_in_list_sql_clause from synapse.storage.database import ( DatabasePool, LoggingDatabaseConnection, @@ -1684,6 +1682,58 @@ class RoomWorkerStore(CacheInvalidationWorkerStore): return True + async def set_room_is_public(self, room_id: str, is_public: bool) -> None: + await self.db_pool.simple_update_one( + table="rooms", 
+ keyvalues={"room_id": room_id}, + updatevalues={"is_public": is_public}, + desc="set_room_is_public", + ) + + async def set_room_is_public_appservice( + self, room_id: str, appservice_id: str, network_id: str, is_public: bool + ) -> None: + """Edit the appservice/network specific public room list. + + Each appservice can have a number of published room lists associated + with them, keyed off of an appservice defined `network_id`, which + basically represents a single instance of a bridge to a third party + network. + + Args: + room_id + appservice_id + network_id + is_public: Whether to publish or unpublish the room from the list. + """ + + if is_public: + await self.db_pool.simple_upsert( + table="appservice_room_list", + keyvalues={ + "appservice_id": appservice_id, + "network_id": network_id, + "room_id": room_id, + }, + values={}, + insertion_values={ + "appservice_id": appservice_id, + "network_id": network_id, + "room_id": room_id, + }, + desc="set_room_is_public_appservice_true", + ) + else: + await self.db_pool.simple_delete( + table="appservice_room_list", + keyvalues={ + "appservice_id": appservice_id, + "network_id": network_id, + "room_id": room_id, + }, + desc="set_room_is_public_appservice_false", + ) + class _BackgroundUpdates: REMOVE_TOMESTONED_ROOMS_BG_UPDATE = "remove_tombstoned_rooms_from_directory" @@ -1702,7 +1752,7 @@ _REPLACE_ROOM_DEPTH_SQL_COMMANDS = ( ) -class RoomBackgroundUpdateStore(SQLBaseStore): +class RoomBackgroundUpdateStore(RoomWorkerStore): def __init__( self, database: DatabasePool, @@ -1935,14 +1985,6 @@ class RoomBackgroundUpdateStore(SQLBaseStore): return len(rooms) - @abstractmethod - def set_room_is_public(self, room_id: str, is_public: bool) -> Awaitable[None]: - # this will need to be implemented if a background update is performed with - # existing (tombstoned, public) rooms in the database. - # - # It's overridden by RoomStore for the synapse master. - raise NotImplementedError() - async def has_auth_chain_index(self, room_id: str) -> bool: """Check if the room has (or can have) a chain cover index. @@ -2349,62 +2391,6 @@ class RoomStore(RoomBackgroundUpdateStore, RoomWorkerStore): }, ) - async def set_room_is_public(self, room_id: str, is_public: bool) -> None: - await self.db_pool.simple_update_one( - table="rooms", - keyvalues={"room_id": room_id}, - updatevalues={"is_public": is_public}, - desc="set_room_is_public", - ) - - self.hs.get_notifier().on_new_replication_data() - - async def set_room_is_public_appservice( - self, room_id: str, appservice_id: str, network_id: str, is_public: bool - ) -> None: - """Edit the appservice/network specific public room list. - - Each appservice can have a number of published room lists associated - with them, keyed off of an appservice defined `network_id`, which - basically represents a single instance of a bridge to a third party - network. - - Args: - room_id - appservice_id - network_id - is_public: Whether to publish or unpublish the room from the list. 
- """ - - if is_public: - await self.db_pool.simple_upsert( - table="appservice_room_list", - keyvalues={ - "appservice_id": appservice_id, - "network_id": network_id, - "room_id": room_id, - }, - values={}, - insertion_values={ - "appservice_id": appservice_id, - "network_id": network_id, - "room_id": room_id, - }, - desc="set_room_is_public_appservice_true", - ) - else: - await self.db_pool.simple_delete( - table="appservice_room_list", - keyvalues={ - "appservice_id": appservice_id, - "network_id": network_id, - "room_id": room_id, - }, - desc="set_room_is_public_appservice_false", - ) - - self.hs.get_notifier().on_new_replication_data() - async def add_event_report( self, room_id: str, diff --git a/tests/api/test_ratelimiting.py b/tests/api/test_ratelimiting.py index a24638c9e..a59e168db 100644 --- a/tests/api/test_ratelimiting.py +++ b/tests/api/test_ratelimiting.py @@ -116,8 +116,9 @@ class TestRatelimiter(unittest.HomeserverTestCase): # Should raise with self.assertRaises(LimitExceededError) as context: self.get_success_or_raise( - limiter.ratelimit(None, key="test_id", _time_now_s=5) + limiter.ratelimit(None, key="test_id", _time_now_s=5), by=0.5 ) + self.assertEqual(context.exception.retry_after_ms, 5000) # Shouldn't raise @@ -192,7 +193,7 @@ class TestRatelimiter(unittest.HomeserverTestCase): # Second attempt, 1s later, will fail with self.assertRaises(LimitExceededError) as context: self.get_success_or_raise( - limiter.ratelimit(None, key=("test_id",), _time_now_s=1) + limiter.ratelimit(None, key=("test_id",), _time_now_s=1), by=0.5 ) self.assertEqual(context.exception.retry_after_ms, 9000) diff --git a/tests/events/test_presence_router.py b/tests/events/test_presence_router.py index e51cdf01a..e48983ddf 100644 --- a/tests/events/test_presence_router.py +++ b/tests/events/test_presence_router.py @@ -36,7 +36,7 @@ from synapse.server import HomeServer from synapse.types import JsonDict, StreamToken, create_requester from synapse.util import Clock -from tests.handlers.test_sync import generate_sync_config +from tests.handlers.test_sync import SyncRequestKey, SyncVersion, generate_sync_config from tests.unittest import ( FederatingHomeserverTestCase, HomeserverTestCase, @@ -498,6 +498,15 @@ def send_presence_update( return channel.json_body +_request_key = 0 + + +def generate_request_key() -> SyncRequestKey: + global _request_key + _request_key += 1 + return ("request_key", _request_key) + + def sync_presence( testcase: HomeserverTestCase, user_id: str, @@ -521,7 +530,11 @@ def sync_presence( sync_config = generate_sync_config(requester.user.to_string()) sync_result = testcase.get_success( testcase.hs.get_sync_handler().wait_for_sync_for_user( - requester, sync_config, since_token + requester, + sync_config, + SyncVersion.SYNC_V2, + generate_request_key(), + since_token, ) ) diff --git a/tests/federation/transport/server/test__base.py b/tests/federation/transport/server/test__base.py index 065e85957..0e3b41ec4 100644 --- a/tests/federation/transport/server/test__base.py +++ b/tests/federation/transport/server/test__base.py @@ -147,3 +147,10 @@ class BaseFederationAuthorizationTests(unittest.TestCase): ), ("foo", "ed25519:1", "sig", "bar"), ) + # test that "optional whitespace(s)" (space and tabulation) are allowed between comma-separated auth-param components + self.assertEqual( + _parse_auth_header( + b'X-Matrix origin=foo , key="ed25519:1", sig="sig", destination="bar", extra_field=ignored' + ), + ("foo", "ed25519:1", "sig", "bar"), + ) diff --git 
a/tests/handlers/test_federation.py b/tests/handlers/test_federation.py index b819b60c5..3fe5b0a1b 100644 --- a/tests/handlers/test_federation.py +++ b/tests/handlers/test_federation.py @@ -483,6 +483,7 @@ class FederationTestCase(unittest.FederatingHomeserverTestCase): event.room_version, ), exc=LimitExceededError, + by=0.5, ) def _build_and_send_join_event( diff --git a/tests/handlers/test_room_member.py b/tests/handlers/test_room_member.py index 3e28117e2..df43ce581 100644 --- a/tests/handlers/test_room_member.py +++ b/tests/handlers/test_room_member.py @@ -70,6 +70,7 @@ class TestJoinsLimitedByPerRoomRateLimiter(FederatingHomeserverTestCase): action=Membership.JOIN, ), LimitExceededError, + by=0.5, ) @override_config({"rc_joins_per_room": {"per_second": 0, "burst_count": 2}}) @@ -206,6 +207,7 @@ class TestJoinsLimitedByPerRoomRateLimiter(FederatingHomeserverTestCase): remote_room_hosts=[self.OTHER_SERVER_NAME], ), LimitExceededError, + by=0.5, ) # TODO: test that remote joins to a room are rate limited. @@ -273,6 +275,7 @@ class TestReplicatedJoinsLimitedByPerRoomRateLimiter(BaseMultiWorkerStreamTestCa action=Membership.JOIN, ), LimitExceededError, + by=0.5, ) # Try to join as Chris on the original worker. Should get denied because Alice @@ -285,6 +288,7 @@ class TestReplicatedJoinsLimitedByPerRoomRateLimiter(BaseMultiWorkerStreamTestCa action=Membership.JOIN, ), LimitExceededError, + by=0.5, ) diff --git a/tests/handlers/test_sync.py b/tests/handlers/test_sync.py index 2780d29ca..02371ce72 100644 --- a/tests/handlers/test_sync.py +++ b/tests/handlers/test_sync.py @@ -24,14 +24,14 @@ from parameterized import parameterized from twisted.test.proto_helpers import MemoryReactor -from synapse.api.constants import EventTypes, JoinRules +from synapse.api.constants import AccountDataTypes, EventTypes, JoinRules from synapse.api.errors import Codes, ResourceLimitError from synapse.api.filtering import FilterCollection, Filtering from synapse.api.room_versions import RoomVersion, RoomVersions from synapse.events import EventBase from synapse.events.snapshot import EventContext from synapse.federation.federation_base import event_from_pdu_json -from synapse.handlers.sync import SyncConfig, SyncResult +from synapse.handlers.sync import SyncConfig, SyncRequestKey, SyncResult, SyncVersion from synapse.rest import admin from synapse.rest.client import knock, login, room from synapse.server import HomeServer @@ -41,6 +41,14 @@ from synapse.util import Clock import tests.unittest import tests.utils +_request_key = 0 + + +def generate_request_key() -> SyncRequestKey: + global _request_key + _request_key += 1 + return ("request_key", _request_key) + class SyncTestCase(tests.unittest.HomeserverTestCase): """Tests Sync Handler.""" @@ -73,13 +81,23 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): # Check that the happy case does not throw errors self.get_success(self.store.upsert_monthly_active_user(user_id1)) self.get_success( - self.sync_handler.wait_for_sync_for_user(requester, sync_config) + self.sync_handler.wait_for_sync_for_user( + requester, + sync_config, + sync_version=SyncVersion.SYNC_V2, + request_key=generate_request_key(), + ) ) # Test that global lock works self.auth_blocking._hs_disabled = True e = self.get_failure( - self.sync_handler.wait_for_sync_for_user(requester, sync_config), + self.sync_handler.wait_for_sync_for_user( + requester, + sync_config, + sync_version=SyncVersion.SYNC_V2, + request_key=generate_request_key(), + ), ResourceLimitError, ) 
self.assertEqual(e.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED) @@ -90,7 +108,12 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): requester = create_requester(user_id2) e = self.get_failure( - self.sync_handler.wait_for_sync_for_user(requester, sync_config), + self.sync_handler.wait_for_sync_for_user( + requester, + sync_config, + sync_version=SyncVersion.SYNC_V2, + request_key=generate_request_key(), + ), ResourceLimitError, ) self.assertEqual(e.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED) @@ -109,7 +132,10 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): requester = create_requester(user) initial_result = self.get_success( self.sync_handler.wait_for_sync_for_user( - requester, sync_config=generate_sync_config(user, device_id="dev") + requester, + sync_config=generate_sync_config(user, device_id="dev"), + sync_version=SyncVersion.SYNC_V2, + request_key=generate_request_key(), ) ) @@ -140,7 +166,10 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): # The rooms should appear in the sync response. result = self.get_success( self.sync_handler.wait_for_sync_for_user( - requester, sync_config=generate_sync_config(user) + requester, + sync_config=generate_sync_config(user), + sync_version=SyncVersion.SYNC_V2, + request_key=generate_request_key(), ) ) self.assertIn(joined_room, [r.room_id for r in result.joined]) @@ -152,6 +181,8 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): self.sync_handler.wait_for_sync_for_user( requester, sync_config=generate_sync_config(user, device_id="dev"), + sync_version=SyncVersion.SYNC_V2, + request_key=generate_request_key(), since_token=initial_result.next_batch, ) ) @@ -180,7 +211,10 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): # Get a new request key. result = self.get_success( self.sync_handler.wait_for_sync_for_user( - requester, sync_config=generate_sync_config(user) + requester, + sync_config=generate_sync_config(user), + sync_version=SyncVersion.SYNC_V2, + request_key=generate_request_key(), ) ) self.assertNotIn(joined_room, [r.room_id for r in result.joined]) @@ -192,6 +226,8 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): self.sync_handler.wait_for_sync_for_user( requester, sync_config=generate_sync_config(user, device_id="dev"), + sync_version=SyncVersion.SYNC_V2, + request_key=generate_request_key(), since_token=initial_result.next_batch, ) ) @@ -231,7 +267,10 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): # Do a sync as Alice to get the latest event in the room. alice_sync_result: SyncResult = self.get_success( self.sync_handler.wait_for_sync_for_user( - create_requester(owner), generate_sync_config(owner) + create_requester(owner), + generate_sync_config(owner), + sync_version=SyncVersion.SYNC_V2, + request_key=generate_request_key(), ) ) self.assertEqual(len(alice_sync_result.joined), 1) @@ -251,7 +290,12 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): eve_requester = create_requester(eve) eve_sync_config = generate_sync_config(eve) eve_sync_after_ban: SyncResult = self.get_success( - self.sync_handler.wait_for_sync_for_user(eve_requester, eve_sync_config) + self.sync_handler.wait_for_sync_for_user( + eve_requester, + eve_sync_config, + sync_version=SyncVersion.SYNC_V2, + request_key=generate_request_key(), + ) ) # Sanity check this sync result. We shouldn't be joined to the room. 
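        # (Illustrative: every sync call in these tests now passes a fresh
        # `request_key`, because the handler caches whole responses by that key
        # via `response_cache.wrap`; reusing a key would replay the previously
        # cached SyncResult instead of computing a new one.)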
@@ -268,6 +312,8 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): self.sync_handler.wait_for_sync_for_user( eve_requester, eve_sync_config, + sync_version=SyncVersion.SYNC_V2, + request_key=generate_request_key(), since_token=eve_sync_after_ban.next_batch, ) ) @@ -279,6 +325,8 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): self.sync_handler.wait_for_sync_for_user( eve_requester, eve_sync_config, + sync_version=SyncVersion.SYNC_V2, + request_key=generate_request_key(), since_token=None, ) ) @@ -310,7 +358,10 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): # Do an initial sync as Alice to get a known starting point. initial_sync_result = self.get_success( self.sync_handler.wait_for_sync_for_user( - alice_requester, generate_sync_config(alice) + alice_requester, + generate_sync_config(alice), + sync_version=SyncVersion.SYNC_V2, + request_key=generate_request_key(), ) ) last_room_creation_event_id = ( @@ -338,6 +389,8 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): self.hs, {"room": {"timeline": {"limit": 2}}} ), ), + sync_version=SyncVersion.SYNC_V2, + request_key=generate_request_key(), since_token=initial_sync_result.next_batch, ) ) @@ -380,7 +433,10 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): # Do an initial sync as Alice to get a known starting point. initial_sync_result = self.get_success( self.sync_handler.wait_for_sync_for_user( - alice_requester, generate_sync_config(alice) + alice_requester, + generate_sync_config(alice), + sync_version=SyncVersion.SYNC_V2, + request_key=generate_request_key(), ) ) last_room_creation_event_id = ( @@ -418,6 +474,8 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): }, ), ), + sync_version=SyncVersion.SYNC_V2, + request_key=generate_request_key(), since_token=initial_sync_result.next_batch, ) ) @@ -461,7 +519,10 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): # Do an initial sync as Alice to get a known starting point. initial_sync_result = self.get_success( self.sync_handler.wait_for_sync_for_user( - alice_requester, generate_sync_config(alice) + alice_requester, + generate_sync_config(alice), + sync_version=SyncVersion.SYNC_V2, + request_key=generate_request_key(), ) ) last_room_creation_event_id = ( @@ -486,6 +547,8 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): self.hs, {"room": {"timeline": {"limit": 1}}} ), ), + sync_version=SyncVersion.SYNC_V2, + request_key=generate_request_key(), since_token=initial_sync_result.next_batch, ) ) @@ -515,6 +578,8 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): self.hs, {"room": {"timeline": {"limit": 1}}} ), ), + sync_version=SyncVersion.SYNC_V2, + request_key=generate_request_key(), since_token=incremental_sync.next_batch, ) ) @@ -574,7 +639,10 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): # Do an initial sync to get a known starting point. 
initial_sync_result = self.get_success( self.sync_handler.wait_for_sync_for_user( - alice_requester, generate_sync_config(alice) + alice_requester, + generate_sync_config(alice), + sync_version=SyncVersion.SYNC_V2, + request_key=generate_request_key(), ) ) last_room_creation_event_id = ( @@ -598,6 +666,8 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): self.hs, {"room": {"timeline": {"limit": 1}}} ), ), + sync_version=SyncVersion.SYNC_V2, + request_key=generate_request_key(), ) ) room_sync = initial_sync_result.joined[0] @@ -618,6 +688,8 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): self.sync_handler.wait_for_sync_for_user( alice_requester, generate_sync_config(alice), + sync_version=SyncVersion.SYNC_V2, + request_key=generate_request_key(), since_token=initial_sync_result.next_batch, ) ) @@ -668,7 +740,10 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): initial_sync_result = self.get_success( self.sync_handler.wait_for_sync_for_user( - bob_requester, generate_sync_config(bob) + bob_requester, + generate_sync_config(bob), + sync_version=SyncVersion.SYNC_V2, + request_key=generate_request_key(), ) ) @@ -699,6 +774,8 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): generate_sync_config( bob, filter_collection=FilterCollection(self.hs, filter_dict) ), + sync_version=SyncVersion.SYNC_V2, + request_key=generate_request_key(), since_token=None if initial_sync else initial_sync_result.next_batch, ) ).archived[0] @@ -791,7 +868,10 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): # but that it does not come down /sync in public room sync_result: SyncResult = self.get_success( self.sync_handler.wait_for_sync_for_user( - create_requester(user), generate_sync_config(user) + create_requester(user), + generate_sync_config(user), + sync_version=SyncVersion.SYNC_V2, + request_key=generate_request_key(), ) ) event_ids = [] @@ -837,7 +917,10 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): private_sync_result: SyncResult = self.get_success( self.sync_handler.wait_for_sync_for_user( - create_requester(user2), generate_sync_config(user2) + create_requester(user2), + generate_sync_config(user2), + sync_version=SyncVersion.SYNC_V2, + request_key=generate_request_key(), ) ) priv_event_ids = [] @@ -846,8 +929,35 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): self.assertIn(private_call_event.event_id, priv_event_ids) + def test_push_rules_with_bad_account_data(self) -> None: + """Some old accounts have managed to set a `m.push_rules` account data, + which we should ignore in /sync response. + """ -_request_key = 0 + user = self.register_user("alice", "password") + + # Insert the bad account data. + self.get_success( + self.store.add_account_data_for_user(user, AccountDataTypes.PUSH_RULES, {}) + ) + + sync_result: SyncResult = self.get_success( + self.sync_handler.wait_for_sync_for_user( + create_requester(user), + generate_sync_config(user), + sync_version=SyncVersion.SYNC_V2, + request_key=generate_request_key(), + ) + ) + + for account_dict in sync_result.account_data: + if account_dict["type"] == AccountDataTypes.PUSH_RULES: + # We should have lots of push rules here, rather than the bad + # empty data. 
+                self.assertNotEqual(account_dict["content"], {})
+                return
+
+        self.fail("No push rules found")
 
 
 def generate_sync_config(
@@ -866,12 +976,9 @@ def generate_sync_config(
     if filter_collection is None:
         filter_collection = Filtering(Mock()).DEFAULT_FILTER_COLLECTION
 
-    global _request_key
-    _request_key += 1
     return SyncConfig(
         user=UserID.from_string(user_id),
         filter_collection=filter_collection,
         is_guest=False,
-        request_key=("request_key", _request_key),
         device_id=device_id,
     )
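
The hunks above remove the module-level `_request_key` counter and the `request_key` field from `SyncConfig`; tests now pass the cache key into `wait_for_sync_for_user` explicitly, alongside the new `SyncVersion` value. The definition of `generate_request_key` is not part of this excerpt; a minimal sketch of such a helper, assuming it simply reproduces the old incrementing-counter behaviour, might look like:

import itertools
from typing import Tuple

# Hypothetical sketch only: the real helper is defined elsewhere in this
# change. It just needs to yield a key that is unique per simulated request,
# mirroring the removed module-level `_request_key` counter.
_request_key_counter = itertools.count(1)


def generate_request_key() -> Tuple[str, int]:
    """Return a fresh, unique sync request cache key for use in tests."""
    return ("request_key", next(_request_key_counter))
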
diff --git a/tests/rest/synapse/__init__.py b/tests/rest/synapse/__init__.py
new file mode 100644
index 000000000..e5138f67e
--- /dev/null
+++ b/tests/rest/synapse/__init__.py
@@ -0,0 +1,12 @@
+#
+# This file is licensed under the Affero General Public License (AGPL) version 3.
+#
+# Copyright (C) 2024 New Vector, Ltd
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# See the GNU Affero General Public License for more details:
+# <https://www.gnu.org/licenses/agpl-3.0.html>.
diff --git a/tests/rest/synapse/client/__init__.py b/tests/rest/synapse/client/__init__.py
new file mode 100644
index 000000000..e5138f67e
--- /dev/null
+++ b/tests/rest/synapse/client/__init__.py
@@ -0,0 +1,12 @@
+#
+# This file is licensed under the Affero General Public License (AGPL) version 3.
+#
+# Copyright (C) 2024 New Vector, Ltd
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# See the GNU Affero General Public License for more details:
+# <https://www.gnu.org/licenses/agpl-3.0.html>.
diff --git a/tests/rest/synapse/client/test_federation_whitelist.py b/tests/rest/synapse/client/test_federation_whitelist.py
new file mode 100644
index 000000000..f0067a8f2
--- /dev/null
+++ b/tests/rest/synapse/client/test_federation_whitelist.py
@@ -0,0 +1,119 @@
+#
+# This file is licensed under the Affero General Public License (AGPL) version 3.
+#
+# Copyright (C) 2024 New Vector, Ltd
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# See the GNU Affero General Public License for more details:
+# <https://www.gnu.org/licenses/agpl-3.0.html>.
+
+from typing import Dict
+
+from twisted.web.resource import Resource
+
+from synapse.rest import admin
+from synapse.rest.client import login
+from synapse.rest.synapse.client import build_synapse_client_resource_tree
+
+from tests import unittest
+
+
+class FederationWhitelistTests(unittest.HomeserverTestCase):
+    servlets = [
+        admin.register_servlets_for_client_rest_resource,
+        login.register_servlets,
+    ]
+
+    def create_resource_dict(self) -> Dict[str, Resource]:
+        base = super().create_resource_dict()
+        base.update(build_synapse_client_resource_tree(self.hs))
+        return base
+
+    def test_default(self) -> None:
+        "If the config option is not enabled, the endpoint should 404"
+        channel = self.make_request(
+            "GET", "/_synapse/client/v1/config/federation_whitelist", shorthand=False
+        )
+
+        self.assertEqual(channel.code, 404)
+
+    @unittest.override_config({"federation_whitelist_endpoint_enabled": True})
+    def test_no_auth(self) -> None:
+        "Endpoint requires auth when enabled"
+
+        channel = self.make_request(
+            "GET", "/_synapse/client/v1/config/federation_whitelist", shorthand=False
+        )
+
+        self.assertEqual(channel.code, 401)
+
+    @unittest.override_config({"federation_whitelist_endpoint_enabled": True})
+    def test_no_whitelist(self) -> None:
+        "Test when there is no whitelist configured"
+
+        self.register_user("user", "password")
+        tok = self.login("user", "password")
+
+        channel = self.make_request(
+            "GET",
+            "/_synapse/client/v1/config/federation_whitelist",
+            shorthand=False,
+            access_token=tok,
+        )
+
+        self.assertEqual(channel.code, 200)
+        self.assertEqual(
+            channel.json_body, {"whitelist_enabled": False, "whitelist": []}
+        )
+
+    @unittest.override_config(
+        {
+            "federation_whitelist_endpoint_enabled": True,
+            "federation_domain_whitelist": ["example.com"],
+        }
+    )
+    def test_whitelist(self) -> None:
+        "Test when there is a whitelist configured"
+
+        self.register_user("user", "password")
+        tok = self.login("user", "password")
+
+        channel = self.make_request(
+            "GET",
+            "/_synapse/client/v1/config/federation_whitelist",
+            shorthand=False,
+            access_token=tok,
+        )
+
+        self.assertEqual(channel.code, 200)
+        self.assertEqual(
+            channel.json_body, {"whitelist_enabled": True, "whitelist": ["example.com"]}
+        )
+
+    @unittest.override_config(
+        {
+            "federation_whitelist_endpoint_enabled": True,
+            "federation_domain_whitelist": ["example.com", "example.com"],
+        }
+    )
+    def test_whitelist_no_duplicates(self) -> None:
+        "Test when there is a whitelist configured with duplicates, no duplicates are returned"
+
+        self.register_user("user", "password")
+        tok = self.login("user", "password")
+
+        channel = self.make_request(
+            "GET",
+            "/_synapse/client/v1/config/federation_whitelist",
+            shorthand=False,
+            access_token=tok,
+        )
+
+        self.assertEqual(channel.code, 200)
+        self.assertEqual(
+            channel.json_body, {"whitelist_enabled": True, "whitelist": ["example.com"]}
+        )
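
Taken together, these tests pin down the new endpoint's contract: it returns 404 while `federation_whitelist_endpoint_enabled` is unset, 401 without authentication, and otherwise the (de-duplicated) whitelist. A minimal sketch of a client-side call, with a placeholder homeserver URL and access token, could look like:

import requests  # assumes the third-party `requests` library; any HTTP client works

# Hypothetical values for illustration only.
HOMESERVER = "https://matrix.example.org"
ACCESS_TOKEN = "syt_placeholder_token"

resp = requests.get(
    f"{HOMESERVER}/_synapse/client/v1/config/federation_whitelist",
    headers={"Authorization": f"Bearer {ACCESS_TOKEN}"},
)
# Expected: 404 if the feature is disabled, 401 if the token is missing or
# invalid, otherwise 200 with a body such as
# {"whitelist_enabled": true, "whitelist": ["example.com"]}.
print(resp.status_code, resp.json())
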
""" deferred: Deferred[Any] = ensureDeferred(d) # type: ignore[arg-type] - self.pump() + self.pump(by) return self.failureResultOf(deferred, exc) def get_success_or_raise(self, d: Awaitable[TV], by: float = 0.0) -> TV: