Use inline type hints in various other places (in synapse/) (#10380)

commit bf72d10dbf
parent c7603af1d0

79 changed files with 329 additions and 336 deletions
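The change is mechanical throughout: comment-style annotations (# type: ...) are replaced by PEP 526 inline annotations, with no behavioural change intended. A minimal sketch of the before/after pattern, using illustrative names rather than code from the diff:

from typing import Dict, List, Optional


# Old style: the type lives in a trailing comment.
# headers = {}  # type: Dict[bytes, List[bytes]]

# New style: the type is part of the assignment itself.
headers: Dict[bytes, List[bytes]] = {}


class Example:
    # The same applies to class attributes with defaults...
    transport: Optional[str] = None

    def __init__(self) -> None:
        # ...and to instance attributes assigned in __init__.
        self.name: Optional[str] = None

The hunks below are that same substitution applied to locals, instance attributes, and class attributes across the HTTP code.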
@@ -69,7 +69,7 @@ def _get_requested_host(request: IRequest) -> bytes:
         return hostname
 
     # no Host header, use the address/port that the request arrived on
-    host = request.getHost()  # type: Union[address.IPv4Address, address.IPv6Address]
+    host: Union[address.IPv4Address, address.IPv6Address] = request.getHost()
 
     hostname = host.host.encode("ascii")
 
@@ -160,7 +160,7 @@ class _IPBlacklistingResolver:
     def resolveHostName(
         self, recv: IResolutionReceiver, hostname: str, portNumber: int = 0
     ) -> IResolutionReceiver:
-        addresses = []  # type: List[IAddress]
+        addresses: List[IAddress] = []
 
         def _callback() -> None:
             has_bad_ip = False
@@ -333,9 +333,9 @@ class SimpleHttpClient:
         if self._ip_blacklist:
             # If we have an IP blacklist, we need to use a DNS resolver which
             # filters out blacklisted IP addresses, to prevent DNS rebinding.
-            self.reactor = BlacklistingReactorWrapper(
+            self.reactor: ISynapseReactor = BlacklistingReactorWrapper(
                 hs.get_reactor(), self._ip_whitelist, self._ip_blacklist
-            )  # type: ISynapseReactor
+            )
         else:
             self.reactor = hs.get_reactor()
 
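A sketch of why the annotation sits on the conditional assignment: declaring the attribute as the common interface on its first assignment means the assignment in the other branch is checked against that interface too, which is the job the old trailing "# type: ISynapseReactor" comment did. The names here (Reactor, WrappedReactor, Client) are stand-ins, not Synapse classes:

from typing import Protocol


class Reactor(Protocol):
    """Stand-in for an interface such as ISynapseReactor."""

    def run(self) -> None:
        ...


class RealReactor:
    def run(self) -> None:
        print("running")


class WrappedReactor:
    """Stand-in for a wrapper such as BlacklistingReactorWrapper."""

    def __init__(self, inner: Reactor) -> None:
        self._inner = inner

    def run(self) -> None:
        self._inner.run()


class Client:
    def __init__(self, use_wrapper: bool) -> None:
        if use_wrapper:
            # Annotating the first assignment fixes self.reactor's type, so
            # the plain assignment in the else branch is checked against the
            # same interface instead of the narrower WrappedReactor type.
            self.reactor: Reactor = WrappedReactor(RealReactor())
        else:
            self.reactor = RealReactor()


Client(True).reactor.run()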
@@ -349,14 +349,14 @@ class SimpleHttpClient:
         pool.maxPersistentPerHost = max((100 * hs.config.caches.global_factor, 5))
         pool.cachedConnectionTimeout = 2 * 60
 
-        self.agent = ProxyAgent(
+        self.agent: IAgent = ProxyAgent(
             self.reactor,
             hs.get_reactor(),
             connectTimeout=15,
             contextFactory=self.hs.get_http_client_context_factory(),
             pool=pool,
             use_proxy=use_proxy,
-        )  # type: IAgent
+        )
 
         if self._ip_blacklist:
             # If we have an IP blacklist, we then install the blacklisting Agent
@@ -411,7 +411,7 @@ class SimpleHttpClient:
                 cooperator=self._cooperator,
             )
 
-        request_deferred = treq.request(
+        request_deferred: defer.Deferred = treq.request(
             method,
             uri,
             agent=self.agent,
@@ -421,7 +421,7 @@ class SimpleHttpClient:
             # response bodies.
             unbuffered=True,
             **self._extra_treq_args,
-        )  # type: defer.Deferred
+        )
 
         # we use our own timeout mechanism rather than treq's as a workaround
         # for https://twistedmatrix.com/trac/ticket/9534.
@@ -772,7 +772,7 @@ class BodyExceededMaxSize(Exception):
 class _DiscardBodyWithMaxSizeProtocol(protocol.Protocol):
     """A protocol which immediately errors upon receiving data."""
 
-    transport = None  # type: Optional[ITCPTransport]
+    transport: Optional[ITCPTransport] = None
 
     def __init__(self, deferred: defer.Deferred):
         self.deferred = deferred
@@ -798,7 +798,7 @@ class _DiscardBodyWithMaxSizeProtocol(protocol.Protocol):
 class _ReadBodyWithMaxSizeProtocol(protocol.Protocol):
     """A protocol which reads body to a stream, erroring if the body exceeds a maximum size."""
 
-    transport = None  # type: Optional[ITCPTransport]
+    transport: Optional[ITCPTransport] = None
 
     def __init__(
         self, stream: ByteWriteable, deferred: defer.Deferred, max_size: Optional[int]
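Both protocol hunks move the same class-level attribute inline: transport starts as None and is only set once a connection exists, so the annotation and the default now sit on one line. A self-contained sketch of that shape, with illustrative names:

from typing import Optional


class Connection:
    # Class-level default with an inline annotation: the attribute starts as
    # None and is filled in later, here by attach().
    transport: Optional[str] = None

    def attach(self, transport: str) -> None:
        self.transport = transport

    def send(self, data: bytes) -> None:
        if self.transport is None:
            raise RuntimeError("not connected")
        print(f"sending {len(data)} bytes via {self.transport}")


conn = Connection()
conn.attach("tcp")
conn.send(b"hello")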
@@ -106,7 +106,7 @@ class ByteParser(ByteWriteable, Generic[T], abc.ABC):
     the parsed data.
     """
 
-    CONTENT_TYPE = abc.abstractproperty()  # type: str  # type: ignore
+    CONTENT_TYPE: str = abc.abstractproperty()  # type: ignore
     """The expected content type of the response, e.g. `application/json`. If
     the content type doesn't match we fail the request.
     """
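The ByteParser hunk keeps its "# type: ignore" because assigning abc.abstractproperty() to an attribute declared as str does not type-check, while the placeholder still stops an incomplete subclass from being instantiated. A small sketch of the pattern (Parser and JsonParser are illustrative, not Synapse classes):

import abc


class Parser(abc.ABC):
    # Declared as str for type checking; the abstractproperty placeholder
    # makes CONTENT_TYPE abstract, so subclasses must override it.
    CONTENT_TYPE: str = abc.abstractproperty()  # type: ignore


class JsonParser(Parser):
    CONTENT_TYPE = "application/json"


print(JsonParser().CONTENT_TYPE)
# Parser() would raise TypeError: can't instantiate an abstract class.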
@@ -327,11 +327,11 @@ class MatrixFederationHttpClient:
 
         # We need to use a DNS resolver which filters out blacklisted IP
         # addresses, to prevent DNS rebinding.
-        self.reactor = BlacklistingReactorWrapper(
+        self.reactor: ISynapseReactor = BlacklistingReactorWrapper(
             hs.get_reactor(),
             hs.config.federation_ip_range_whitelist,
             hs.config.federation_ip_range_blacklist,
-        )  # type: ISynapseReactor
+        )
 
         user_agent = hs.version_string
         if hs.config.user_agent_suffix:
@@ -504,7 +504,7 @@ class MatrixFederationHttpClient:
         )
 
         # Inject the span into the headers
-        headers_dict = {}  # type: Dict[bytes, List[bytes]]
+        headers_dict: Dict[bytes, List[bytes]] = {}
         opentracing.inject_header_dict(headers_dict, request.destination)
 
         headers_dict[b"User-Agent"] = [self.version_string_bytes]
@@ -533,9 +533,9 @@ class MatrixFederationHttpClient:
                         destination_bytes, method_bytes, url_to_sign_bytes, json
                     )
                     data = encode_canonical_json(json)
-                    producer = QuieterFileBodyProducer(
+                    producer: Optional[IBodyProducer] = QuieterFileBodyProducer(
                         BytesIO(data), cooperator=self._cooperator
-                    )  # type: Optional[IBodyProducer]
+                    )
                 else:
                     producer = None
                     auth_headers = self.build_auth_headers(
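In this hunk the annotation does the job the old "# type: Optional[IBodyProducer]" comment did: producer is a QuieterFileBodyProducer in one branch and None in the other, and annotating the first assignment as Optional is what lets the None branch type-check. A minimal sketch with stand-in names:

from io import BytesIO
from typing import Optional


class BodyProducer:
    """Stand-in for IBodyProducer / QuieterFileBodyProducer."""

    def __init__(self, stream: BytesIO) -> None:
        self.stream = stream


def make_producer(data: Optional[bytes]) -> Optional[BodyProducer]:
    if data:
        # Annotating the first assignment widens the variable to
        # Optional[BodyProducer]; without it mypy would infer BodyProducer
        # and reject the None assignment below.
        producer: Optional[BodyProducer] = BodyProducer(BytesIO(data))
    else:
        producer = None
    return producer


print(make_producer(b"payload"))
print(make_producer(None))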
@@ -81,7 +81,7 @@ def return_json_error(f: failure.Failure, request: SynapseRequest) -> None:
 
     if f.check(SynapseError):
         # mypy doesn't understand that f.check asserts the type.
-        exc = f.value  # type: SynapseError  # type: ignore
+        exc: SynapseError = f.value  # type: ignore
         error_code = exc.code
         error_dict = exc.error_dict()
 
@@ -132,7 +132,7 @@ def return_html_error(
     """
     if f.check(CodeMessageException):
         # mypy doesn't understand that f.check asserts the type.
-        cme = f.value  # type: CodeMessageException  # type: ignore
+        cme: CodeMessageException = f.value  # type: ignore
         code = cme.code
         msg = cme.msg
 
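The two error-handler hunks share a pattern: f.check() has already verified the exception type at runtime, but (as the comments in the code say) mypy cannot see that, so the value is pinned with an inline annotation plus "# type: ignore". A rough sketch of that shape, using a stand-in Failure class rather than Twisted's:

class SynapseError(Exception):
    def __init__(self, code: int, msg: str) -> None:
        super().__init__(msg)
        self.code = code


class Failure:
    """Very rough stand-in for twisted.python.failure.Failure."""

    def __init__(self, value: BaseException) -> None:
        self.value: BaseException = value

    def check(self, *types: type) -> bool:
        return isinstance(self.value, types)


def handle(f: Failure) -> int:
    if f.check(SynapseError):
        # check() proved the runtime type, but mypy still sees BaseException,
        # hence the explicit annotation and the ignore.
        exc: SynapseError = f.value  # type: ignore
        return exc.code
    return 500


print(handle(Failure(SynapseError(404, "not found"))))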
@@ -404,7 +404,7 @@ class JsonResource(DirectServeJsonResource):
             key word arguments to pass to the callback
         """
         # At this point the path must be bytes.
-        request_path_bytes = request.path  # type: bytes  # type: ignore
+        request_path_bytes: bytes = request.path  # type: ignore
         request_path = request_path_bytes.decode("ascii")
         # Treat HEAD requests as GET requests.
         request_method = request.method
@@ -557,7 +557,7 @@ class _ByteProducer:
         request: Request,
         iterator: Iterator[bytes],
     ):
-        self._request = request  # type: Optional[Request]
+        self._request: Optional[Request] = request
         self._iterator = iterator
         self._paused = False
 
@@ -205,7 +205,7 @@ def parse_string(
         parameter is present, must be one of a list of allowed values and
         is not one of those allowed values.
     """
-    args = request.args  # type: Dict[bytes, List[bytes]]  # type: ignore
+    args: Dict[bytes, List[bytes]] = request.args  # type: ignore
     return parse_string_from_args(
         args,
         name,
@@ -64,16 +64,16 @@ class SynapseRequest(Request):
     def __init__(self, channel, *args, max_request_body_size=1024, **kw):
         Request.__init__(self, channel, *args, **kw)
         self._max_request_body_size = max_request_body_size
-        self.site = channel.site  # type: SynapseSite
+        self.site: SynapseSite = channel.site
         self._channel = channel  # this is used by the tests
         self.start_time = 0.0
 
         # The requester, if authenticated. For federation requests this is the
         # server name, for client requests this is the Requester object.
-        self._requester = None  # type: Optional[Union[Requester, str]]
+        self._requester: Optional[Union[Requester, str]] = None
 
         # we can't yet create the logcontext, as we don't know the method.
-        self.logcontext = None  # type: Optional[LoggingContext]
+        self.logcontext: Optional[LoggingContext] = None
 
         global _next_request_seq
         self.request_seq = _next_request_seq
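Two slightly different cases appear in this __init__. The channel parameter has no annotation, so channel.site is Any; the inline annotation on self.site pins the attribute to SynapseSite instead of letting Any propagate. The requester and logcontext attributes are the Optional-with-None-default case, annotated where they are first assigned. A sketch of both, with made-up Site/Channel types:

from typing import Any, Optional


class Site:
    server_name = "example.org"


class Channel:
    site = Site()


def get_channel() -> Any:
    # Returns Any, much like an unannotated parameter is treated.
    return Channel()


class Request:
    def __init__(self, channel: Any) -> None:
        # channel is Any, so channel.site would be Any too; the annotation
        # pins the attribute to a concrete type.
        self.site: Site = channel.site
        # Attributes filled in later start as None with an Optional annotation.
        self.logcontext: Optional[str] = None


req = Request(get_channel())
print(req.site.server_name)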
@@ -152,7 +152,7 @@ class SynapseRequest(Request):
         Returns:
             The redacted URI as a string.
         """
-        uri = self.uri  # type: Union[bytes, str]
+        uri: Union[bytes, str] = self.uri
         if isinstance(uri, bytes):
             uri = uri.decode("ascii", errors="replace")
         return redact_uri(uri)
@@ -167,7 +167,7 @@ class SynapseRequest(Request):
         Returns:
             The request method as a string.
         """
-        method = self.method  # type: Union[bytes, str]
+        method: Union[bytes, str] = self.method
         if isinstance(method, bytes):
             return self.method.decode("ascii")
         return method
@@ -434,8 +434,8 @@ class XForwardedForRequest(SynapseRequest):
     """
 
     # the client IP and ssl flag, as extracted from the headers.
-    _forwarded_for = None  # type: Optional[_XForwardedForAddress]
-    _forwarded_https = False  # type: bool
+    _forwarded_for: "Optional[_XForwardedForAddress]" = None
+    _forwarded_https: bool = False
 
     def requestReceived(self, command, path, version):
         # this method is called by the Channel once the full request has been
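One detail in the last hunk: the new annotation is a string, "Optional[_XForwardedForAddress]", presumably because _XForwardedForAddress is defined further down the module. Class-body annotations are evaluated when the class is created, so an unquoted forward reference would raise NameError; a quoted one is stored unevaluated and resolved only by the type checker. A minimal sketch (plain classes standing in for the real ones):

from typing import Optional


class Request:
    # Quoted forward reference: the name below is not defined yet at this
    # point in the module, so an unquoted annotation would raise NameError
    # when the class body is evaluated.
    _forwarded_for: "Optional[_XForwardedForAddress]" = None
    _forwarded_https: bool = False


class _XForwardedForAddress:
    def __init__(self, host: str) -> None:
        self.host = host


print(Request._forwarded_https)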