Use inline type hints in various other places (in synapse/) (#10380)

parent c7603af1d0
commit bf72d10dbf

79 changed files with 329 additions and 336 deletions
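The change is mechanical: every trailing `# type:` comment becomes a PEP 526 inline variable annotation, which mypy treats the same for these simple cases. A minimal before/after sketch (hypothetical variable names, not code from this diff):

from typing import Optional

# Before: the type lives in a trailing comment.
buffer_size = None  # type: Optional[int]

# After: the same declaration as an inline annotation (PEP 526).
buffer_size_inline: Optional[int] = None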
@@ -110,9 +110,9 @@ class RemoteHandler(logging.Handler):
         self.port = port
         self.maximum_buffer = maximum_buffer

-        self._buffer = deque()  # type: Deque[logging.LogRecord]
-        self._connection_waiter = None  # type: Optional[Deferred]
-        self._producer = None  # type: Optional[LogProducer]
+        self._buffer: Deque[logging.LogRecord] = deque()
+        self._connection_waiter: Optional[Deferred] = None
+        self._producer: Optional[LogProducer] = None

         # Connect without DNS lookups if it's a direct IP.
         if _reactor is None:
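The same pattern applied to instance attributes: each field is annotated at the point where it is first assigned in `__init__`. A minimal sketch using a hypothetical class, not the real RemoteHandler:

from collections import deque
from typing import Deque, Optional


class BufferedHandler:
    def __init__(self, host: str, port: int, maximum_buffer: int = 10) -> None:
        self.host = host
        self.port = port
        self.maximum_buffer = maximum_buffer

        # Inline annotations replace the older `# type:` comments.
        self._buffer: Deque[str] = deque()
        self._connection_waiter: Optional[object] = None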
@@ -123,9 +123,9 @@ class RemoteHandler(logging.Handler):
         try:
             ip = ip_address(self.host)
             if isinstance(ip, IPv4Address):
-                endpoint = TCP4ClientEndpoint(
+                endpoint: IStreamClientEndpoint = TCP4ClientEndpoint(
                     _reactor, self.host, self.port
-                )  # type: IStreamClientEndpoint
+                )
             elif isinstance(ip, IPv6Address):
                 endpoint = TCP6ClientEndpoint(_reactor, self.host, self.port)
             else:
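The annotation lands on the first assignment because that is where mypy fixes a variable's declared type: without the explicit `IStreamClientEndpoint`, the narrower type inferred from the IPv4 branch would typically make the `TCP6ClientEndpoint` assignment below an incompatible-assignment error. A hedged sketch with stand-in classes, not the Twisted types:

class StreamEndpoint:
    """Stand-in for IStreamClientEndpoint."""


class TCP4Endpoint(StreamEndpoint):
    pass


class TCP6Endpoint(StreamEndpoint):
    pass


def make_endpoint(is_ipv6: bool) -> StreamEndpoint:
    if not is_ipv6:
        # Annotate the first assignment so both branches type-check.
        endpoint: StreamEndpoint = TCP4Endpoint()
    else:
        endpoint = TCP6Endpoint()
    return endpoint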
@@ -165,7 +165,7 @@ class RemoteHandler(logging.Handler):
         def writer(result: Protocol) -> None:
             # Force recognising transport as a Connection and not the more
             # generic ITransport.
-            transport = result.transport  # type: Connection  # type: ignore
+            transport: Connection = result.transport  # type: ignore

             # We have a connection. If we already have a producer, and its
             # transport is the same, just trigger a resumeProducing.
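Here the inline annotation shares a line with `# type: ignore`: the annotation narrows the value to `Connection`, and the ignore comment suppresses the incompatible-assignment error mypy would otherwise raise because the attribute is declared with the broader transport type. An illustrative sketch with hypothetical names:

class ITransportStub:
    """Stand-in for the generic transport interface."""


class ConnectionStub(ITransportStub):
    def close(self) -> None:
        print("closed")


class Result:
    def __init__(self) -> None:
        self.transport: ITransportStub = ConnectionStub()


def handle(result: Result) -> None:
    # Narrow to the concrete type; `# type: ignore` silences the
    # incompatible-assignment error mypy would otherwise report.
    transport: ConnectionStub = result.transport  # type: ignore
    transport.close()


handle(Result())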
@@ -188,7 +188,7 @@ class RemoteHandler(logging.Handler):
                 self._producer.resumeProducing()
                 self._connection_waiter = None

-        deferred = self._service.whenConnected(failAfterFailures=1)  # type: Deferred
+        deferred: Deferred = self._service.whenConnected(failAfterFailures=1)
         deferred.addCallbacks(writer, fail)
         self._connection_waiter = deferred
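Annotating the result of a call works the same way and keeps later calls checkable when the callee's return type is loose (for example `Any` in older stubs). A small, hypothetical sketch:

from typing import Any, Callable, List


class Pending:
    """Stand-in for a Deferred-like object."""

    def __init__(self) -> None:
        self.callbacks: List[Callable[[Any], None]] = []

    def add_callback(self, fn: Callable[[Any], None]) -> None:
        self.callbacks.append(fn)


def when_connected(fail_after_failures: int = 1) -> Any:
    """Stand-in for an API whose stub returns Any."""
    return Pending()


# The inline annotation restores checking on the calls below, which would
# otherwise be skipped because the value is Any.
pending: Pending = when_connected(fail_after_failures=1)
pending.add_callback(print)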
@@ -63,7 +63,7 @@ def parse_drain_configs(
             DrainType.CONSOLE_JSON,
             DrainType.FILE_JSON,
         ):
-            formatter = "json"  # type: Optional[str]
+            formatter: Optional[str] = "json"
         elif logging_type in (
             DrainType.CONSOLE_JSON_TERSE,
             DrainType.NETWORK_JSON_TERSE,
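The explicit `Optional[str]` matters because the first assignment is a plain string: without it, mypy would typically infer `str` and reject a later branch that sets the formatter to None. A minimal sketch with hypothetical drain names:

from typing import Optional


def choose_formatter(logging_type: str) -> Optional[str]:
    if logging_type in ("console_json", "file_json"):
        # Annotated as Optional[str] so the final branch may assign None.
        formatter: Optional[str] = "json"
    elif logging_type in ("console_json_terse", "network_json_terse"):
        formatter = "json_terse"
    else:
        formatter = None
    return formatter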
@@ -113,13 +113,13 @@ class ContextResourceUsage:
             self.reset()
         else:
             # FIXME: mypy can't infer the types set via reset() above, so specify explicitly for now
-            self.ru_utime = copy_from.ru_utime  # type: float
-            self.ru_stime = copy_from.ru_stime  # type: float
-            self.db_txn_count = copy_from.db_txn_count  # type: int
+            self.ru_utime: float = copy_from.ru_utime
+            self.ru_stime: float = copy_from.ru_stime
+            self.db_txn_count: int = copy_from.db_txn_count

-            self.db_txn_duration_sec = copy_from.db_txn_duration_sec  # type: float
-            self.db_sched_duration_sec = copy_from.db_sched_duration_sec  # type: float
-            self.evt_db_fetch_count = copy_from.evt_db_fetch_count  # type: int
+            self.db_txn_duration_sec: float = copy_from.db_txn_duration_sec
+            self.db_sched_duration_sec: float = copy_from.db_sched_duration_sec
+            self.evt_db_fetch_count: int = copy_from.evt_db_fetch_count

     def copy(self) -> "ContextResourceUsage":
         return ContextResourceUsage(copy_from=self)
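The FIXME explains why these attributes need explicit annotations at all: on the other branch they are initialised inside reset(), so mypy has no visible assignment to infer from here. A hedged sketch of the same situation with a hypothetical class:

from typing import Optional


class UsageCounters:
    def __init__(self, copy_from: Optional["UsageCounters"] = None) -> None:
        if copy_from is None:
            self.reset()
        else:
            # mypy cannot see the types set inside reset(), so annotate
            # explicitly on this path.
            self.cpu_time: float = copy_from.cpu_time
            self.db_txn_count: int = copy_from.db_txn_count

    def reset(self) -> None:
        self.cpu_time = 0.0
        self.db_txn_count = 0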
@@ -289,12 +289,12 @@ class LoggingContext:

         # The thread resource usage when the logcontext became active. None
         # if the context is not currently active.
-        self.usage_start = None  # type: Optional[resource._RUsage]
+        self.usage_start: Optional[resource._RUsage] = None

         self.main_thread = get_thread_id()
         self.request = None
         self.tag = ""
-        self.scope = None  # type: Optional[_LogContextScope]
+        self.scope: Optional["_LogContextScope"] = None

         # keep track of whether we have hit the __exit__ block for this context
         # (suggesting that the the thing that created the context thinks it should
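Note the quotes around `"_LogContextScope"`: unlike a type comment, an annotation on an attribute target is evaluated at runtime, so a class defined later in the module has to be written as a string forward reference. A small sketch with hypothetical names:

from typing import Optional


class Tracker:
    def __init__(self) -> None:
        # Quoted because Scope is defined further down the module; the
        # annotation expression is evaluated when __init__ runs.
        self.scope: Optional["Scope"] = None


class Scope:
    pass


t = Tracker()
t.scope = Scope()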
@@ -251,7 +251,7 @@ try:
             except Exception:
                 logger.exception("Failed to report span")

-    RustReporter = _WrappedRustReporter  # type: Optional[Type[_WrappedRustReporter]]
+    RustReporter: Optional[Type[_WrappedRustReporter]] = _WrappedRustReporter
 except ImportError:
     RustReporter = None
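Declaring the name as `Optional[Type[...]]` on the try branch covers the fallback too: both the wrapper class and None can then be assigned to the same module-level name. A hedged sketch of the optional-dependency pattern, with a hypothetical module name:

from typing import Optional, Type


class _WrappedReporter:
    """Stand-in wrapper for an optional native reporter."""


try:
    import some_optional_backend  # hypothetical dependency  # noqa: F401

    # Annotated once so the except branch may assign None to the same name.
    Reporter: Optional[Type[_WrappedReporter]] = _WrappedReporter
except ImportError:
    Reporter = None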
@@ -286,7 +286,7 @@ class SynapseBaggage:
 # Block everything by default
 # A regex which matches the server_names to expose traces for.
 # None means 'block everything'.
-_homeserver_whitelist = None  # type: Optional[Pattern[str]]
+_homeserver_whitelist: Optional[Pattern[str]] = None

 # Util methods
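Module-level globals take the same inline form; here the value starts as None and is only replaced by a compiled regex once a whitelist is configured, hence `Optional[Pattern[str]]`. A minimal sketch with a hypothetical setter, not the real configuration code:

import re
from typing import Iterable, Optional, Pattern

# None means "block everything" until a whitelist is configured.
_homeserver_whitelist: Optional[Pattern[str]] = None


def set_homeserver_whitelist(server_names: Iterable[str]) -> None:
    """Compile the configured server names into a single regex (sketch)."""
    global _homeserver_whitelist
    names = list(server_names)
    if names:
        _homeserver_whitelist = re.compile("|".join(re.escape(n) for n in names))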
@@ -662,7 +662,7 @@ def inject_header_dict(

     span = opentracing.tracer.active_span

-    carrier = {}  # type: Dict[str, str]
+    carrier: Dict[str, str] = {}
     opentracing.tracer.inject(span.context, opentracing.Format.HTTP_HEADERS, carrier)

     for key, value in carrier.items():
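This hunk and the two below are the empty-container case: a bare `{}` gives mypy nothing to infer an element type from, so the annotation spells out `Dict[str, str]`. A small sketch, independent of the opentracing API:

from typing import Dict

# Without the annotation, mypy reports "Need type annotation" for a bare {}.
carrier: Dict[str, str] = {}
carrier["traceparent"] = "00-abc123-def456-01"

for key, value in carrier.items():
    print(key, value)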
@@ -704,7 +704,7 @@ def get_active_span_text_map(destination=None):
     if destination and not whitelisted_homeserver(destination):
         return {}

-    carrier = {}  # type: Dict[str, str]
+    carrier: Dict[str, str] = {}
     opentracing.tracer.inject(
         opentracing.tracer.active_span.context, opentracing.Format.TEXT_MAP, carrier
     )
@@ -718,7 +718,7 @@ def active_span_context_as_string():
     Returns:
         The active span context encoded as a string.
     """
-    carrier = {}  # type: Dict[str, str]
+    carrier: Dict[str, str] = {}
     if opentracing:
         opentracing.tracer.inject(
             opentracing.tracer.active_span.context, opentracing.Format.TEXT_MAP, carrier