Reduce the number of "untyped defs" (#12716)

commit 17e1eb7749 (parent de1e599b9d)
16 changed files with 142 additions and 69 deletions
@@ -43,8 +43,10 @@ from twisted.internet import defer, error as twisted_error, protocol, ssl
 from twisted.internet.address import IPv4Address, IPv6Address
 from twisted.internet.interfaces import (
     IAddress,
+    IDelayedCall,
     IHostResolution,
     IReactorPluggableNameResolver,
+    IReactorTime,
     IResolutionReceiver,
     ITCPTransport,
 )
@@ -121,13 +123,15 @@ def check_against_blacklist(
 _EPSILON = 0.00000001


-def _make_scheduler(reactor):
+def _make_scheduler(
+    reactor: IReactorTime,
+) -> Callable[[Callable[[], object]], IDelayedCall]:
     """Makes a schedular suitable for a Cooperator using the given reactor.

     (This is effectively just a copy from `twisted.internet.task`)
     """

-    def _scheduler(x):
+    def _scheduler(x: Callable[[], object]) -> IDelayedCall:
         return reactor.callLater(_EPSILON, x)

     return _scheduler
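The new signature spells out the contract a Cooperator expects from its scheduler: a callable that accepts a zero-argument callable and returns an IDelayedCall. A minimal standalone sketch of that contract, not part of this commit (the demo generator and the global-reactor wiring are illustrative assumptions):

from typing import Callable

from twisted.internet import reactor, task
from twisted.internet.interfaces import IDelayedCall, IReactorTime

_EPSILON = 0.00000001


def _make_scheduler(
    reactor: IReactorTime,
) -> Callable[[Callable[[], object]], IDelayedCall]:
    # Same shape as the helper in the hunk above: wrap callLater so the
    # Cooperator can hand over a zero-argument callable and get an IDelayedCall.
    def _scheduler(x: Callable[[], object]) -> IDelayedCall:
        return reactor.callLater(_EPSILON, x)

    return _scheduler


def ticks():
    # Toy cooperative task: yields control back to the reactor between steps.
    for i in range(3):
        print("tick", i)
        yield None


cooperator = task.Cooperator(scheduler=_make_scheduler(reactor))
d = cooperator.coiterate(ticks())
d.addCallback(lambda _: reactor.stop())
reactor.run()

This typed helper is also what lets a later hunk in MatrixFederationHttpClient drop its private schedule closure and the _EPSILON import in favour of _make_scheduler(self.reactor).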
@@ -775,7 +779,7 @@ class SimpleHttpClient:
         )


-def _timeout_to_request_timed_out_error(f: Failure):
+def _timeout_to_request_timed_out_error(f: Failure) -> Failure:
     if f.check(twisted_error.TimeoutError, twisted_error.ConnectingCancelledError):
         # The TCP connection has its own timeout (set by the 'connectTimeout' param
         # on the Agent), which raises twisted_error.TimeoutError exception.
@@ -809,7 +813,7 @@ class _DiscardBodyWithMaxSizeProtocol(protocol.Protocol):
     def __init__(self, deferred: defer.Deferred):
         self.deferred = deferred

-    def _maybe_fail(self):
+    def _maybe_fail(self) -> None:
         """
         Report a max size exceed error and disconnect the first time this is called.
         """
@@ -933,12 +937,12 @@ class InsecureInterceptableContextFactory(ssl.ContextFactory):
     Do not use this since it allows an attacker to intercept your communications.
     """

-    def __init__(self):
+    def __init__(self) -> None:
         self._context = SSL.Context(SSL.SSLv23_METHOD)
         self._context.set_verify(VERIFY_NONE, lambda *_: False)

     def getContext(self, hostname=None, port=None):
         return self._context

-    def creatorForNetloc(self, hostname, port):
+    def creatorForNetloc(self, hostname: bytes, port: int):
         return self
@@ -239,7 +239,7 @@ class MatrixHostnameEndpointFactory:

         self._srv_resolver = srv_resolver

-    def endpointForURI(self, parsed_uri: URI):
+    def endpointForURI(self, parsed_uri: URI) -> "MatrixHostnameEndpoint":
         return MatrixHostnameEndpoint(
             self._reactor,
             self._proxy_reactor,
@@ -16,7 +16,7 @@
 import logging
 import random
 import time
-from typing import Callable, Dict, List
+from typing import Any, Callable, Dict, List

 import attr

@@ -109,7 +109,7 @@ class SrvResolver:

     def __init__(
         self,
-        dns_client=client,
+        dns_client: Any = client,
         cache: Dict[bytes, List[Server]] = SERVER_CACHE,
         get_time: Callable[[], float] = time.time,
     ):
@@ -74,9 +74,9 @@ _well_known_cache: TTLCache[bytes, Optional[bytes]] = TTLCache("well-known")
 _had_valid_well_known_cache: TTLCache[bytes, bool] = TTLCache("had-valid-well-known")


-@attr.s(slots=True, frozen=True)
+@attr.s(slots=True, frozen=True, auto_attribs=True)
 class WellKnownLookupResult:
-    delegated_server = attr.ib()
+    delegated_server: Optional[bytes]


 class WellKnownResolver:
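With auto_attribs=True, attrs builds the field from the annotation, so mypy sees delegated_server as Optional[bytes] rather than an untyped attr.ib(). A small sketch of the resulting behaviour, written for this note rather than taken from the commit (the example value is illustrative):

from typing import Optional

import attr


@attr.s(slots=True, frozen=True, auto_attribs=True)
class WellKnownLookupResult:
    # The annotation alone is enough; attrs turns it into a field.
    delegated_server: Optional[bytes]


result = WellKnownLookupResult(delegated_server=b"matrix.example.org")
print(result.delegated_server)  # b'matrix.example.org'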
@@ -336,4 +336,4 @@ def _parse_cache_control(headers: Headers) -> Dict[bytes, Optional[bytes]]:
 class _FetchWellKnownFailure(Exception):
     # True if we didn't get a non-5xx HTTP response, i.e. this may or may not be
     # a temporary failure.
-    temporary = attr.ib()
+    temporary: bool = attr.ib()
@@ -23,6 +23,8 @@ from http import HTTPStatus
 from io import BytesIO, StringIO
 from typing import (
     TYPE_CHECKING,
+    Any,
+    BinaryIO,
     Callable,
     Dict,
     Generic,
@@ -44,7 +46,7 @@ from typing_extensions import Literal
 from twisted.internet import defer
 from twisted.internet.error import DNSLookupError
 from twisted.internet.interfaces import IReactorTime
-from twisted.internet.task import _EPSILON, Cooperator
+from twisted.internet.task import Cooperator
 from twisted.web.client import ResponseFailed
 from twisted.web.http_headers import Headers
 from twisted.web.iweb import IBodyProducer, IResponse
@@ -58,11 +60,13 @@ from synapse.api.errors import (
     RequestSendFailed,
     SynapseError,
 )
+from synapse.crypto.context_factory import FederationPolicyForHTTPS
 from synapse.http import QuieterFileBodyProducer
 from synapse.http.client import (
     BlacklistingAgentWrapper,
     BodyExceededMaxSize,
     ByteWriteable,
+    _make_scheduler,
     encode_query_args,
     read_body_with_max_size,
 )
@@ -181,7 +185,7 @@ class JsonParser(ByteParser[Union[JsonDict, list]]):

     CONTENT_TYPE = "application/json"

-    def __init__(self):
+    def __init__(self) -> None:
         self._buffer = StringIO()
         self._binary_wrapper = BinaryIOWrapper(self._buffer)

@@ -299,7 +303,9 @@ async def _handle_response(
 class BinaryIOWrapper:
     """A wrapper for a TextIO which converts from bytes on the fly."""

-    def __init__(self, file: typing.TextIO, encoding="utf-8", errors="strict"):
+    def __init__(
+        self, file: typing.TextIO, encoding: str = "utf-8", errors: str = "strict"
+    ):
         self.decoder = codecs.getincrementaldecoder(encoding)(errors)
         self.file = file

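For context on what BinaryIOWrapper delegates to: codecs.getincrementaldecoder returns a decoder that buffers partial multi-byte sequences across calls, so bytes can arrive in arbitrary chunks. A standalone sketch, not part of the commit; the chunking below is an illustrative assumption:

import codecs

decoder = codecs.getincrementaldecoder("utf-8")("strict")

# "café" encoded as UTF-8 and split in the middle of the two-byte "é".
chunks = [b"caf", b"\xc3", b"\xa9"]
text = "".join(decoder.decode(chunk) for chunk in chunks)
print(text)  # café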
@@ -317,7 +323,11 @@ class MatrixFederationHttpClient:
     requests.
     """

-    def __init__(self, hs: "HomeServer", tls_client_options_factory):
+    def __init__(
+        self,
+        hs: "HomeServer",
+        tls_client_options_factory: Optional[FederationPolicyForHTTPS],
+    ):
         self.hs = hs
         self.signing_key = hs.signing_key
         self.server_name = hs.hostname
@@ -348,10 +358,7 @@ class MatrixFederationHttpClient:
         self.version_string_bytes = hs.version_string.encode("ascii")
         self.default_timeout = 60

-        def schedule(x):
-            self.reactor.callLater(_EPSILON, x)
-
-        self._cooperator = Cooperator(scheduler=schedule)
+        self._cooperator = Cooperator(scheduler=_make_scheduler(self.reactor))

         self._sleeper = AwakenableSleeper(self.reactor)

@@ -364,7 +371,7 @@ class MatrixFederationHttpClient:
         self,
         request: MatrixFederationRequest,
         try_trailing_slash_on_400: bool = False,
-        **send_request_args,
+        **send_request_args: Any,
     ) -> IResponse:
         """Wrapper for _send_request which can optionally retry the request
         upon receiving a combination of a 400 HTTP response code and a
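The annotation on **send_request_args describes the values of the keyword arguments, not the mapping itself; inside the function the parameter behaves as a Dict[str, Any]. A tiny sketch written for this note, with illustrative keyword names:

from typing import Any


def forward(**send_request_args: Any) -> None:
    # send_request_args is a Dict[str, Any]; each value is typed Any.
    for name, value in send_request_args.items():
        print(name, "=", value)


forward(retry_on_dns_fail=True, timeout=10)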
@@ -1159,7 +1166,7 @@ class MatrixFederationHttpClient:
         self,
         destination: str,
         path: str,
-        output_stream,
+        output_stream: BinaryIO,
         args: Optional[QueryParams] = None,
         retry_on_dns_fail: bool = True,
         max_size: Optional[int] = None,
@@ -1250,10 +1257,10 @@ class MatrixFederationHttpClient:
         return length, headers


-def _flatten_response_never_received(e):
+def _flatten_response_never_received(e: BaseException) -> str:
     if hasattr(e, "reasons"):
         reasons = ", ".join(
-            _flatten_response_never_received(f.value) for f in e.reasons
+            _flatten_response_never_received(f.value) for f in e.reasons  # type: ignore[attr-defined]
        )

         return "%s:[%s]" % (type(e).__name__, reasons)
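The hasattr guard protects the access at runtime, but it does not narrow the type for mypy, which still sees a plain BaseException with no "reasons" attribute; hence the targeted attr-defined ignore. A sketch of the same pattern, written for this note rather than copied from the file:

def _flatten(e: BaseException) -> str:
    # hasattr() guards the runtime access, but mypy still sees BaseException,
    # which has no "reasons" attribute, so the narrow ignore is kept.
    if hasattr(e, "reasons"):
        reasons = ", ".join(_flatten(f.value) for f in e.reasons)  # type: ignore[attr-defined]
        return "%s:[%s]" % (type(e).__name__, reasons)
    return repr(e)


print(_flatten(ConnectionError("boom")))  # ConnectionError('boom')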
@@ -162,7 +162,7 @@ class RequestMetrics:
         with _in_flight_requests_lock:
             _in_flight_requests.add(self)

-    def stop(self, time_sec, response_code, sent_bytes):
+    def stop(self, time_sec: float, response_code: int, sent_bytes: int) -> None:
         with _in_flight_requests_lock:
             _in_flight_requests.discard(self)

@@ -186,13 +186,13 @@ class RequestMetrics:
                 )
                 return

-        response_code = str(response_code)
+        response_code_str = str(response_code)

-        outgoing_responses_counter.labels(self.method, response_code).inc()
+        outgoing_responses_counter.labels(self.method, response_code_str).inc()

         response_count.labels(self.method, self.name, tag).inc()

-        response_timer.labels(self.method, self.name, tag, response_code).observe(
+        response_timer.labels(self.method, self.name, tag, response_code_str).observe(
             time_sec - self.start_ts
         )

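Introducing response_code_str keeps the int parameter and the string label value as two differently typed names, which mypy prefers over re-binding one variable to a new type; prometheus_client label values are strings. A minimal sketch, where the demo counter name is an assumption of this note rather than a Synapse metric:

from prometheus_client import Counter

demo_responses = Counter(
    "demo_responses_total", "Responses by method and code", ["method", "code"]
)

response_code = 200                     # stays an int
response_code_str = str(response_code)  # string form used as the label value
demo_responses.labels("GET", response_code_str).inc()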
@@ -221,7 +221,7 @@ class RequestMetrics:
         # flight.
         self.update_metrics()

-    def update_metrics(self):
+    def update_metrics(self) -> None:
         """Updates the in flight metrics with values from this request."""
         if not self.start_context:
             logger.error(