Pārlūkot izejas kodu

Update type annotations for compatibility with prometheus_client 0.14 (#12389)

Principally, `prometheus_client.REGISTRY.register` now requires its argument to
extend `prometheus_client.Collector`.

Additionally, `Gauge.set` is now annotated so that passing `Optional[int]`
causes an error.
tags/v1.57.0rc1
Richard van der Hoff pirms 2 gadiem
committed by GitHub
vecāks
revīzija
ae01a7edd3
Šim parakstam datu bāzē netika atrasta zināma atslēga GPG atslēgas ID: 4AEE18F83AFDEB23
8 mainītis faili ar 67 papildinājumiem un 18 dzēšanām
  1. +1
    -0
      changelog.d/12389.misc
  2. +9
    -7
      synapse/metrics/__init__.py
  3. +4
    -2
      synapse/metrics/_gc.py
  4. +3
    -1
      synapse/metrics/_reactor_metrics.py
  5. +31
    -0
      synapse/metrics/_types.py
  6. +2
    -1
      synapse/metrics/background_process_metrics.py
  7. +16
    -4
      synapse/metrics/jemalloc.py
  8. +1
    -3
      synapse/storage/databases/main/events.py

+ 1
- 0
changelog.d/12389.misc Parādīt failu

@@ -0,0 +1 @@
Update type annotations for compatibility with prometheus_client 0.14.

+ 9
- 7
synapse/metrics/__init__.py Parādīt failu

@@ -1,4 +1,5 @@
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2022 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -52,12 +53,13 @@ from synapse.metrics._exposition import (
start_http_server,
)
from synapse.metrics._gc import MIN_TIME_BETWEEN_GCS, install_gc_manager
from synapse.metrics._types import Collector

logger = logging.getLogger(__name__)

METRICS_PREFIX = "/_synapse/metrics"

all_gauges: "Dict[str, Union[LaterGauge, InFlightGauge]]" = {}
all_gauges: Dict[str, Collector] = {}

HAVE_PROC_SELF_STAT = os.path.exists("/proc/self/stat")

@@ -78,11 +80,10 @@ RegistryProxy = cast(CollectorRegistry, _RegistryProxy)


@attr.s(slots=True, hash=True, auto_attribs=True)
class LaterGauge:

class LaterGauge(Collector):
name: str
desc: str
labels: Optional[Iterable[str]] = attr.ib(hash=False)
labels: Optional[Sequence[str]] = attr.ib(hash=False)
# callback: should either return a value (if there are no labels for this metric),
# or dict mapping from a label tuple to a value
caller: Callable[
@@ -125,7 +126,7 @@ class LaterGauge:
MetricsEntry = TypeVar("MetricsEntry")


class InFlightGauge(Generic[MetricsEntry]):
class InFlightGauge(Generic[MetricsEntry], Collector):
"""Tracks number of things (e.g. requests, Measure blocks, etc) in flight
at any given time.

@@ -246,7 +247,7 @@ class InFlightGauge(Generic[MetricsEntry]):
all_gauges[self.name] = self


class GaugeBucketCollector:
class GaugeBucketCollector(Collector):
"""Like a Histogram, but the buckets are Gauges which are updated atomically.

The data is updated by calling `update_data` with an iterable of measurements.
@@ -340,7 +341,7 @@ class GaugeBucketCollector:
#


class CPUMetrics:
class CPUMetrics(Collector):
def __init__(self) -> None:
ticks_per_sec = 100
try:
@@ -470,6 +471,7 @@ def register_threadpool(name: str, threadpool: ThreadPool) -> None:


__all__ = [
"Collector",
"MetricsResource",
"generate_latest",
"start_http_server",


+ 4
- 2
synapse/metrics/_gc.py Parādīt failu

@@ -30,6 +30,8 @@ from prometheus_client.core import (

from twisted.internet import task

from synapse.metrics._types import Collector

"""Prometheus metrics for garbage collection"""


@@ -71,7 +73,7 @@ gc_time = Histogram(
)


class GCCounts:
class GCCounts(Collector):
def collect(self) -> Iterable[Metric]:
cm = GaugeMetricFamily("python_gc_counts", "GC object counts", labels=["gen"])
for n, m in enumerate(gc.get_count()):
@@ -135,7 +137,7 @@ def install_gc_manager() -> None:
#


class PyPyGCStats:
class PyPyGCStats(Collector):
def collect(self) -> Iterable[Metric]:

# @stats is a pretty-printer object with __str__() returning a nice table,


+ 3
- 1
synapse/metrics/_reactor_metrics.py Parādīt failu

@@ -21,6 +21,8 @@ from prometheus_client.core import REGISTRY, GaugeMetricFamily

from twisted.internet import reactor

from synapse.metrics._types import Collector

#
# Twisted reactor metrics
#
@@ -54,7 +56,7 @@ class EpollWrapper:
return getattr(self._poller, item)


class ReactorLastSeenMetric:
class ReactorLastSeenMetric(Collector):
def __init__(self, epoll_wrapper: EpollWrapper):
self._epoll_wrapper = epoll_wrapper



+ 31
- 0
synapse/metrics/_types.py Parādīt failu

@@ -0,0 +1,31 @@
# Copyright 2022 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


from abc import ABC, abstractmethod
from typing import Iterable

from prometheus_client import Metric

try:
from prometheus_client.registry import Collector
except ImportError:
# prometheus_client.Collector is new as of prometheus 0.14. We redefine it here
# for compatibility with earlier versions.
class _Collector(ABC):
@abstractmethod
def collect(self) -> Iterable[Metric]:
pass

Collector = _Collector # type: ignore

+ 2
- 1
synapse/metrics/background_process_metrics.py Parādīt failu

@@ -46,6 +46,7 @@ from synapse.logging.opentracing import (
noop_context_manager,
start_active_span,
)
from synapse.metrics._types import Collector

if TYPE_CHECKING:
import resource
@@ -127,7 +128,7 @@ _background_processes_active_since_last_scrape: "Set[_BackgroundProcess]" = set(
_bg_metrics_lock = threading.Lock()


class _Collector:
class _Collector(Collector):
"""A custom metrics collector for the background process metrics.

Ensures that all of the metrics are up-to-date with any in-flight processes


+ 16
- 4
synapse/metrics/jemalloc.py Parādīt failu

@@ -16,11 +16,13 @@ import ctypes
import logging
import os
import re
from typing import Iterable, Optional
from typing import Iterable, Optional, overload

from prometheus_client import Metric
from prometheus_client import REGISTRY, Metric
from typing_extensions import Literal

from synapse.metrics import REGISTRY, GaugeMetricFamily
from synapse.metrics import GaugeMetricFamily
from synapse.metrics._types import Collector

logger = logging.getLogger(__name__)

@@ -59,6 +61,16 @@ def _setup_jemalloc_stats() -> None:

jemalloc = ctypes.CDLL(jemalloc_path)

@overload
def _mallctl(
name: str, read: Literal[True] = True, write: Optional[int] = None
) -> int:
...

@overload
def _mallctl(name: str, read: Literal[False], write: Optional[int] = None) -> None:
...

def _mallctl(
name: str, read: bool = True, write: Optional[int] = None
) -> Optional[int]:
@@ -134,7 +146,7 @@ def _setup_jemalloc_stats() -> None:
except Exception as e:
logger.warning("Failed to reload jemalloc stats: %s", e)

class JemallocCollector:
class JemallocCollector(Collector):
"""Metrics for internal jemalloc stats."""

def collect(self) -> Iterable[Metric]:


+ 1
- 3
synapse/storage/databases/main/events.py Parādīt failu

@@ -200,9 +200,7 @@ class PersistEventsStore:
if stream < 0:
# backfilled events have negative stream orderings, so we don't
# want to set the event_persisted_position to that.
synapse.metrics.event_persisted_position.set(
events_and_contexts[-1][0].internal_metadata.stream_ordering
)
synapse.metrics.event_persisted_position.set(stream)

for event, context in events_and_contexts:
if context.app_service:


Notiek ielāde…
Atcelt
Saglabāt