Update type annotations for compatibility with prometheus_client 0.14 (#12389)
Principally, `prometheus_client.REGISTRY.register` now requires its argument to extend `prometheus_client.Collector`. Additionally, `Gauge.set` is now annotated so that passing `Optional[int]` fails type checking.
commit ae01a7edd3 (parent 793d03e2c5)
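
To make the effect of the new annotations concrete, here is a minimal, self-contained sketch (not part of this commit; the collector and metric names are made up for illustration) of what prometheus_client 0.14 expects when registering a custom collector:

from typing import Iterable

from prometheus_client import REGISTRY, Metric
from prometheus_client.core import GaugeMetricFamily
from prometheus_client.registry import Collector  # new in prometheus_client 0.14


class QueueDepthCollector(Collector):
    # Extending Collector is what REGISTRY.register() now requires of its argument.
    def collect(self) -> Iterable[Metric]:
        yield GaugeMetricFamily("queue_depth", "Items currently queued", value=3.0)


REGISTRY.register(QueueDepthCollector())
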
changelog.d/12389.misc (new file, 1 line)
@@ -0,0 +1 @@
+Update type annotations for compatibility with prometheus_client 0.14.
synapse/metrics/__init__.py

@@ -1,4 +1,5 @@
 # Copyright 2015, 2016 OpenMarket Ltd
+# Copyright 2022 The Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -52,12 +53,13 @@ from synapse.metrics._exposition import (
     start_http_server,
 )
 from synapse.metrics._gc import MIN_TIME_BETWEEN_GCS, install_gc_manager
+from synapse.metrics._types import Collector
 
 logger = logging.getLogger(__name__)
 
 METRICS_PREFIX = "/_synapse/metrics"
 
-all_gauges: "Dict[str, Union[LaterGauge, InFlightGauge]]" = {}
+all_gauges: Dict[str, Collector] = {}
 
 HAVE_PROC_SELF_STAT = os.path.exists("/proc/self/stat")
 
@@ -78,11 +80,10 @@ RegistryProxy = cast(CollectorRegistry, _RegistryProxy)
 
 
 @attr.s(slots=True, hash=True, auto_attribs=True)
-class LaterGauge:
-
+class LaterGauge(Collector):
     name: str
     desc: str
-    labels: Optional[Iterable[str]] = attr.ib(hash=False)
+    labels: Optional[Sequence[str]] = attr.ib(hash=False)
     # callback: should either return a value (if there are no labels for this metric),
     # or dict mapping from a label tuple to a value
     caller: Callable[
@@ -125,7 +126,7 @@ class LaterGauge:
 MetricsEntry = TypeVar("MetricsEntry")
 
 
-class InFlightGauge(Generic[MetricsEntry]):
+class InFlightGauge(Generic[MetricsEntry], Collector):
     """Tracks number of things (e.g. requests, Measure blocks, etc) in flight
     at any given time.
 
@@ -246,7 +247,7 @@ class InFlightGauge(Generic[MetricsEntry]):
         all_gauges[self.name] = self
 
 
-class GaugeBucketCollector:
+class GaugeBucketCollector(Collector):
     """Like a Histogram, but the buckets are Gauges which are updated atomically.
 
     The data is updated by calling `update_data` with an iterable of measurements.
@@ -340,7 +341,7 @@ class GaugeBucketCollector:
 #
 
 
-class CPUMetrics:
+class CPUMetrics(Collector):
     def __init__(self) -> None:
         ticks_per_sec = 100
         try:
@@ -470,6 +471,7 @@ def register_threadpool(name: str, threadpool: ThreadPool) -> None:
 
 
 __all__ = [
+    "Collector",
     "MetricsResource",
     "generate_latest",
     "start_http_server",
synapse/metrics/_gc.py

@@ -30,6 +30,8 @@ from prometheus_client.core import (
 
 from twisted.internet import task
 
+from synapse.metrics._types import Collector
+
 """Prometheus metrics for garbage collection"""
 
 
@@ -71,7 +73,7 @@ gc_time = Histogram(
 )
 
 
-class GCCounts:
+class GCCounts(Collector):
     def collect(self) -> Iterable[Metric]:
         cm = GaugeMetricFamily("python_gc_counts", "GC object counts", labels=["gen"])
         for n, m in enumerate(gc.get_count()):
@@ -135,7 +137,7 @@ def install_gc_manager() -> None:
 #
 
 
-class PyPyGCStats:
+class PyPyGCStats(Collector):
     def collect(self) -> Iterable[Metric]:
 
         # @stats is a pretty-printer object with __str__() returning a nice table,
synapse/metrics/_reactor_metrics.py

@@ -21,6 +21,8 @@ from prometheus_client.core import REGISTRY, GaugeMetricFamily
 
 from twisted.internet import reactor
 
+from synapse.metrics._types import Collector
+
 #
 # Twisted reactor metrics
 #
@@ -54,7 +56,7 @@ class EpollWrapper:
         return getattr(self._poller, item)
 
 
-class ReactorLastSeenMetric:
+class ReactorLastSeenMetric(Collector):
     def __init__(self, epoll_wrapper: EpollWrapper):
         self._epoll_wrapper = epoll_wrapper
 
synapse/metrics/_types.py (new file, 31 lines)
@@ -0,0 +1,31 @@
+# Copyright 2022 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from abc import ABC, abstractmethod
+from typing import Iterable
+
+from prometheus_client import Metric
+
+try:
+    from prometheus_client.registry import Collector
+except ImportError:
+    # prometheus_client.Collector is new as of prometheus 0.14. We redefine it here
+    # for compatibility with earlier versions.
+    class _Collector(ABC):
+        @abstractmethod
+        def collect(self) -> Iterable[Metric]:
+            pass
+
+    Collector = _Collector  # type: ignore
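
A hedged sketch of how this shared alias is intended to be consumed elsewhere in `synapse.metrics` (the collector below is hypothetical, not from this commit): subclasses pick up the real upstream ABC on prometheus_client 0.14+ and the local stand-in on older releases, so a single class definition type-checks against either version:

from typing import Iterable

from prometheus_client import Metric
from prometheus_client.core import GaugeMetricFamily

from synapse.metrics._types import Collector


class JobQueueCollector(Collector):
    # Hypothetical collector, for illustration only. collect() is the one method the
    # Collector contract requires, whichever definition of Collector is in play.
    def collect(self) -> Iterable[Metric]:
        yield GaugeMetricFamily("job_queue_size", "Jobs currently queued", value=0.0)
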
synapse/metrics/background_process_metrics.py

@@ -46,6 +46,7 @@ from synapse.logging.opentracing import (
     noop_context_manager,
     start_active_span,
 )
+from synapse.metrics._types import Collector
 
 if TYPE_CHECKING:
     import resource
@@ -127,7 +128,7 @@ _background_processes_active_since_last_scrape: "Set[_BackgroundProcess]" = set(
 _bg_metrics_lock = threading.Lock()
 
 
-class _Collector:
+class _Collector(Collector):
     """A custom metrics collector for the background process metrics.
 
     Ensures that all of the metrics are up-to-date with any in-flight processes
synapse/metrics/jemalloc.py

@@ -16,11 +16,13 @@ import ctypes
 import logging
 import os
 import re
-from typing import Iterable, Optional
+from typing import Iterable, Optional, overload
 
-from prometheus_client import Metric
+from prometheus_client import REGISTRY, Metric
+from typing_extensions import Literal
 
-from synapse.metrics import REGISTRY, GaugeMetricFamily
+from synapse.metrics import GaugeMetricFamily
+from synapse.metrics._types import Collector
 
 logger = logging.getLogger(__name__)
 
@@ -59,6 +61,16 @@ def _setup_jemalloc_stats() -> None:
 
     jemalloc = ctypes.CDLL(jemalloc_path)
 
+    @overload
+    def _mallctl(
+        name: str, read: Literal[True] = True, write: Optional[int] = None
+    ) -> int:
+        ...
+
+    @overload
+    def _mallctl(name: str, read: Literal[False], write: Optional[int] = None) -> None:
+        ...
+
     def _mallctl(
         name: str, read: bool = True, write: Optional[int] = None
     ) -> Optional[int]:
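
The `@overload` pair added above is the standard `typing` idiom for a function whose return type depends on an argument's value: `_mallctl(name)` (or any call with `read=True`) is typed as returning `int`, while a call with `read=False` is typed as returning `None`. A self-contained sketch of the same pattern, using a hypothetical function name:

from typing import Optional, overload

from typing_extensions import Literal


@overload
def fetch_stat(name: str, read: Literal[True] = True) -> int:
    ...


@overload
def fetch_stat(name: str, read: Literal[False]) -> None:
    ...


def fetch_stat(name: str, read: bool = True) -> Optional[int]:
    # Toy implementation: only the annotations matter for this example.
    return 42 if read else None


allocated: int = fetch_stat("stats.allocated")  # checked as int, not Optional[int]
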
@@ -134,7 +146,7 @@ def _setup_jemalloc_stats() -> None:
         except Exception as e:
             logger.warning("Failed to reload jemalloc stats: %s", e)
 
-    class JemallocCollector:
+    class JemallocCollector(Collector):
         """Metrics for internal jemalloc stats."""
 
         def collect(self) -> Iterable[Metric]:
|
@ -200,9 +200,7 @@ class PersistEventsStore:
|
||||
if stream < 0:
|
||||
# backfilled events have negative stream orderings, so we don't
|
||||
# want to set the event_persisted_position to that.
|
||||
synapse.metrics.event_persisted_position.set(
|
||||
events_and_contexts[-1][0].internal_metadata.stream_ordering
|
||||
)
|
||||
synapse.metrics.event_persisted_position.set(stream)
|
||||
|
||||
for event, context in events_and_contexts:
|
||||
if context.app_service:
|
||||
|
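
For context on the change above: `internal_metadata.stream_ordering` is typed as `Optional[int]`, which the newly annotated `Gauge.set` no longer accepts, whereas the local `stream` variable is already a plain `int`. A small, hypothetical sketch of the same narrowing issue outside of Synapse:

from typing import Optional

from prometheus_client import Gauge

persisted_position = Gauge("example_persisted_position", "Illustrative position gauge")

maybe_position: Optional[int] = None  # stands in for an Optional stream ordering

# persisted_position.set(maybe_position)  # rejected by mypy under prometheus_client 0.14

if maybe_position is not None:  # narrow Optional[int] to int before calling set()
    persisted_position.set(maybe_position)
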