mirror of
https://git.anonymousland.org/anonymousland/synapse.git
synced 2025-02-17 16:14:06 -05:00
Merge branch 'master' into develop
This commit is contained in:
commit
6982db9651
15
CHANGES.md
15
CHANGES.md
@@ -1,11 +1,20 @@
|
|||||||
Synapse 1.32.0rc1 (2021-04-13)
|
Synapse 1.32.0 (2021-04-20)
|
||||||
==============================
|
===========================
|
||||||
|
|
||||||
**Note:** This release requires Python 3.6+ and Postgres 9.6+ or SQLite 3.22+.
|
**Note:** This release requires Python 3.6+ and Postgres 9.6+ or SQLite 3.22+.
|
||||||
|
|
||||||
This release removes the deprecated `GET /_synapse/admin/v1/users/<user_id>` admin API. Please use the [v2 API](https://github.com/matrix-org/synapse/blob/develop/docs/admin_api/user_admin_api.rst#query-user-account) instead, which has improved capabilities.
|
This release removes the deprecated `GET /_synapse/admin/v1/users/<user_id>` admin API. Please use the [v2 API](https://github.com/matrix-org/synapse/blob/develop/docs/admin_api/user_admin_api.rst#query-user-account) instead, which has improved capabilities.
|
||||||
|
|
||||||
This release requires Application Services to use type `m.login.application_services` when registering users via the `/_matrix/client/r0/register` endpoint to comply with the spec. Please ensure your Application Services are up to date.
|
This release requires Application Services to use type `m.login.application_service` when registering users via the `/_matrix/client/r0/register` endpoint to comply with the spec. Please ensure your Application Services are up to date.
|
||||||
|
|
||||||
|
Bugfixes
|
||||||
|
--------
|
||||||
|
|
||||||
|
- Fix the log lines of nested logging contexts. Broke in 1.32.0rc1. ([\#9829](https://github.com/matrix-org/synapse/issues/9829))
|
||||||
|
|
||||||
|
|
||||||
|
Synapse 1.32.0rc1 (2021-04-13)
|
||||||
|
==============================
|
||||||
|
|
||||||
Features
|
Features
|
||||||
--------
|
--------
|
||||||
|
18
UPGRADE.rst
18
UPGRADE.rst
@@ -111,6 +111,14 @@ upon attempting to use a valid renewal token more than once.
|
|||||||
Upgrading to v1.32.0
|
Upgrading to v1.32.0
|
||||||
====================
|
====================
|
||||||
|
|
||||||
|
Dropping support for old Python, Postgres and SQLite versions
|
||||||
|
-------------------------------------------------------------
|
||||||
|
|
||||||
|
In line with our `deprecation policy <https://github.com/matrix-org/synapse/blob/release-v1.32.0/docs/deprecation_policy.md>`_,
|
||||||
|
we've dropped support for Python 3.5 and PostgreSQL 9.5, as they are no longer supported upstream.
|
||||||
|
|
||||||
|
This release of Synapse requires Python 3.6+ and PostgreSQL 9.6+ or SQLite 3.22+.
|
||||||
|
|
||||||
Removal of old List Accounts Admin API
|
Removal of old List Accounts Admin API
|
||||||
--------------------------------------
|
--------------------------------------
|
||||||
|
|
||||||
@@ -121,6 +129,16 @@ has been available since Synapse 1.7.0 (2019-12-13), and is accessible under ``G
|
|||||||
|
|
||||||
The deprecation of the old endpoint was announced with Synapse 1.28.0 (released on 2021-02-25).
|
The deprecation of the old endpoint was announced with Synapse 1.28.0 (released on 2021-02-25).
|
||||||
|
|
||||||
|
Application Services must use type ``m.login.application_service`` when registering users
|
||||||
|
-----------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
In compliance with the
|
||||||
|
`Application Service spec <https://matrix.org/docs/spec/application_service/r0.1.2#server-admin-style-permissions>`_,
|
||||||
|
Application Services are now required to use the ``m.login.application_service`` type when registering users via the
|
||||||
|
``/_matrix/client/r0/register`` endpoint. This behaviour was deprecated in Synapse v1.30.0.
|
||||||
|
|
||||||
|
Please ensure your Application Services are up to date.
|
||||||
|
|
||||||
Upgrading to v1.29.0
|
Upgrading to v1.29.0
|
||||||
====================
|
====================
|
||||||
|
|
||||||
|
8
debian/changelog
vendored
8
debian/changelog
vendored
@@ -1,8 +1,12 @@
|
|||||||
matrix-synapse-py3 (1.31.0+nmu1) UNRELEASED; urgency=medium
|
matrix-synapse-py3 (1.32.0) stable; urgency=medium
|
||||||
|
|
||||||
|
[ Dan Callahan ]
|
||||||
* Skip tests when DEB_BUILD_OPTIONS contains "nocheck".
|
* Skip tests when DEB_BUILD_OPTIONS contains "nocheck".
|
||||||
|
|
||||||
-- Dan Callahan <danc@element.io> Mon, 12 Apr 2021 13:07:36 +0000
|
[ Synapse Packaging team ]
|
||||||
|
* New synapse release 1.32.0.
|
||||||
|
|
||||||
|
-- Synapse Packaging team <packages@matrix.org> Tue, 20 Apr 2021 14:28:39 +0100
|
||||||
|
|
||||||
matrix-synapse-py3 (1.31.0) stable; urgency=medium
|
matrix-synapse-py3 (1.31.0) stable; urgency=medium
|
||||||
|
|
||||||
|
@@ -47,7 +47,7 @@ try:
|
|||||||
except ImportError:
|
except ImportError:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
__version__ = "1.32.0rc1"
|
__version__ = "1.32.0"
|
||||||
|
|
||||||
if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)):
|
if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)):
|
||||||
# We import here so that we don't have to install a bunch of deps when
|
# We import here so that we don't have to install a bunch of deps when
|
||||||
|
@@ -277,7 +277,7 @@ class LoggingContext:
|
|||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
name: Optional[str] = None,
|
name: str,
|
||||||
parent_context: "Optional[LoggingContext]" = None,
|
parent_context: "Optional[LoggingContext]" = None,
|
||||||
request: Optional[ContextRequest] = None,
|
request: Optional[ContextRequest] = None,
|
||||||
) -> None:
|
) -> None:
|
||||||
@@ -315,9 +315,7 @@ class LoggingContext:
|
|||||||
self.request = request
|
self.request = request
|
||||||
|
|
||||||
def __str__(self) -> str:
|
def __str__(self) -> str:
|
||||||
if self.request:
|
return self.name
|
||||||
return self.request.request_id
|
|
||||||
return "%s@%x" % (self.name, id(self))
|
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def current_context(cls) -> LoggingContextOrSentinel:
|
def current_context(cls) -> LoggingContextOrSentinel:
|
||||||
@@ -694,17 +692,13 @@ def nested_logging_context(suffix: str) -> LoggingContext:
|
|||||||
"Starting nested logging context from sentinel context: metrics will be lost"
|
"Starting nested logging context from sentinel context: metrics will be lost"
|
||||||
)
|
)
|
||||||
parent_context = None
|
parent_context = None
|
||||||
prefix = ""
|
|
||||||
request = None
|
|
||||||
else:
|
else:
|
||||||
assert isinstance(curr_context, LoggingContext)
|
assert isinstance(curr_context, LoggingContext)
|
||||||
parent_context = curr_context
|
parent_context = curr_context
|
||||||
prefix = str(parent_context.name)
|
prefix = str(curr_context)
|
||||||
request = parent_context.request
|
|
||||||
return LoggingContext(
|
return LoggingContext(
|
||||||
prefix + "-" + suffix,
|
prefix + "-" + suffix,
|
||||||
parent_context=parent_context,
|
parent_context=parent_context,
|
||||||
request=request,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@@ -895,7 +889,7 @@ def defer_to_threadpool(reactor, threadpool, f, *args, **kwargs):
|
|||||||
parent_context = curr_context
|
parent_context = curr_context
|
||||||
|
|
||||||
def g():
|
def g():
|
||||||
with LoggingContext(parent_context=parent_context):
|
with LoggingContext(str(curr_context), parent_context=parent_context):
|
||||||
return f(*args, **kwargs)
|
return f(*args, **kwargs)
|
||||||
|
|
||||||
return make_deferred_yieldable(threads.deferToThreadPool(reactor, threadpool, g))
|
return make_deferred_yieldable(threads.deferToThreadPool(reactor, threadpool, g))
|
||||||
|
@@ -15,7 +15,7 @@
|
|||||||
import logging
|
import logging
|
||||||
import threading
|
import threading
|
||||||
from functools import wraps
|
from functools import wraps
|
||||||
from typing import TYPE_CHECKING, Dict, Optional, Set, Union
|
from typing import TYPE_CHECKING, Dict, Optional, Set
|
||||||
|
|
||||||
from prometheus_client.core import REGISTRY, Counter, Gauge
|
from prometheus_client.core import REGISTRY, Counter, Gauge
|
||||||
|
|
||||||
@@ -198,7 +198,7 @@ def run_as_background_process(desc: str, func, *args, bg_start_span=True, **kwar
|
|||||||
_background_process_start_count.labels(desc).inc()
|
_background_process_start_count.labels(desc).inc()
|
||||||
_background_process_in_flight_count.labels(desc).inc()
|
_background_process_in_flight_count.labels(desc).inc()
|
||||||
|
|
||||||
with BackgroundProcessLoggingContext(desc, count) as context:
|
with BackgroundProcessLoggingContext("%s-%s" % (desc, count)) as context:
|
||||||
try:
|
try:
|
||||||
ctx = noop_context_manager()
|
ctx = noop_context_manager()
|
||||||
if bg_start_span:
|
if bg_start_span:
|
||||||
@@ -241,19 +241,12 @@ class BackgroundProcessLoggingContext(LoggingContext):
|
|||||||
processes.
|
processes.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
__slots__ = ["_id", "_proc"]
|
__slots__ = ["_proc"]
|
||||||
|
|
||||||
def __init__(self, name: str, id: Optional[Union[int, str]] = None):
|
def __init__(self, name: str):
|
||||||
super().__init__(name)
|
super().__init__(name)
|
||||||
self._id = id
|
|
||||||
|
|
||||||
self._proc = _BackgroundProcess(name, self)
|
self._proc = _BackgroundProcess(name, self)
|
||||||
|
|
||||||
def __str__(self) -> str:
|
|
||||||
if self._id is not None:
|
|
||||||
return "%s-%s" % (self.name, self._id)
|
|
||||||
return "%s@%x" % (self.name, id(self))
|
|
||||||
|
|
||||||
def start(self, rusage: "Optional[resource._RUsage]"):
|
def start(self, rusage: "Optional[resource._RUsage]"):
|
||||||
"""Log context has started running (again)."""
|
"""Log context has started running (again)."""
|
||||||
|
|
||||||
|
@@ -184,7 +184,7 @@ class BaseReplicationStreamProtocol(LineOnlyReceiver):
|
|||||||
# a logcontext which we use for processing incoming commands. We declare it as a
|
# a logcontext which we use for processing incoming commands. We declare it as a
|
||||||
# background process so that the CPU stats get reported to prometheus.
|
# background process so that the CPU stats get reported to prometheus.
|
||||||
self._logging_context = BackgroundProcessLoggingContext(
|
self._logging_context = BackgroundProcessLoggingContext(
|
||||||
"replication-conn", self.conn_id
|
"replication-conn-%s" % (self.conn_id,)
|
||||||
)
|
)
|
||||||
|
|
||||||
def connectionMade(self):
|
def connectionMade(self):
|
||||||
|
@@ -104,7 +104,13 @@ class Measure:
|
|||||||
"start",
|
"start",
|
||||||
]
|
]
|
||||||
|
|
||||||
def __init__(self, clock, name):
|
def __init__(self, clock, name: str):
|
||||||
|
"""
|
||||||
|
Args:
|
||||||
|
clock: An object with a "time()" method, which returns the current
|
||||||
|
time in seconds.
|
||||||
|
name: The name of the metric to report.
|
||||||
|
"""
|
||||||
self.clock = clock
|
self.clock = clock
|
||||||
self.name = name
|
self.name = name
|
||||||
curr_context = current_context()
|
curr_context = current_context()
|
||||||
@@ -117,10 +123,8 @@ class Measure:
|
|||||||
else:
|
else:
|
||||||
assert isinstance(curr_context, LoggingContext)
|
assert isinstance(curr_context, LoggingContext)
|
||||||
parent_context = curr_context
|
parent_context = curr_context
|
||||||
self._logging_context = LoggingContext(
|
self._logging_context = LoggingContext(str(curr_context), parent_context)
|
||||||
"Measure[%s]" % (self.name,), parent_context
|
self.start = None # type: Optional[int]
|
||||||
)
|
|
||||||
self.start = None
|
|
||||||
|
|
||||||
def __enter__(self) -> "Measure":
|
def __enter__(self) -> "Measure":
|
||||||
if self.start is not None:
|
if self.start is not None:
|
||||||
|
@@ -137,7 +137,7 @@ class TerseJsonTestCase(LoggerCleanupMixin, TestCase):
|
|||||||
]
|
]
|
||||||
self.assertCountEqual(log.keys(), expected_log_keys)
|
self.assertCountEqual(log.keys(), expected_log_keys)
|
||||||
self.assertEqual(log["log"], "Hello there, wally!")
|
self.assertEqual(log["log"], "Hello there, wally!")
|
||||||
self.assertTrue(log["request"].startswith("name@"))
|
self.assertEqual(log["request"], "name")
|
||||||
|
|
||||||
def test_with_request_context(self):
|
def test_with_request_context(self):
|
||||||
"""
|
"""
|
||||||
@@ -164,7 +164,9 @@ class TerseJsonTestCase(LoggerCleanupMixin, TestCase):
|
|||||||
# Also set the requester to ensure the processing works.
|
# Also set the requester to ensure the processing works.
|
||||||
request.requester = "@foo:test"
|
request.requester = "@foo:test"
|
||||||
|
|
||||||
with LoggingContext(parent_context=request.logcontext):
|
with LoggingContext(
|
||||||
|
request.get_request_id(), parent_context=request.logcontext
|
||||||
|
):
|
||||||
logger.info("Hello there, %s!", "wally")
|
logger.info("Hello there, %s!", "wally")
|
||||||
|
|
||||||
log = self.get_log_line()
|
log = self.get_log_line()
|
||||||
|
@@ -135,7 +135,7 @@ class MessageAcceptTests(unittest.HomeserverTestCase):
|
|||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
with LoggingContext():
|
with LoggingContext("test-context"):
|
||||||
failure = self.get_failure(
|
failure = self.get_failure(
|
||||||
self.handler.on_receive_pdu(
|
self.handler.on_receive_pdu(
|
||||||
"test.serv", lying_event, sent_to_us_directly=True
|
"test.serv", lying_event, sent_to_us_directly=True
|
||||||
|
@@ -230,8 +230,7 @@ class DescriptorTestCase(unittest.TestCase):
|
|||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def do_lookup():
|
def do_lookup():
|
||||||
with LoggingContext() as c1:
|
with LoggingContext("c1") as c1:
|
||||||
c1.name = "c1"
|
|
||||||
r = yield obj.fn(1)
|
r = yield obj.fn(1)
|
||||||
self.assertEqual(current_context(), c1)
|
self.assertEqual(current_context(), c1)
|
||||||
return r
|
return r
|
||||||
@@ -273,8 +272,7 @@ class DescriptorTestCase(unittest.TestCase):
|
|||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def do_lookup():
|
def do_lookup():
|
||||||
with LoggingContext() as c1:
|
with LoggingContext("c1") as c1:
|
||||||
c1.name = "c1"
|
|
||||||
try:
|
try:
|
||||||
d = obj.fn(1)
|
d = obj.fn(1)
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
|
Loading…
x
Reference in New Issue
Block a user