Remove unused # type: ignores (#12531)

Over time we've begun to use newer versions of mypy, typeshed, and stub packages, and of course we've improved our own annotations. This makes some `# type: ignore` comments no longer necessary, so I have removed them.

There was one exception: a module that imports `select.epoll`. The ignore is redundant on Linux, but I've kept it for those of us who work on the source tree on something other than Linux. (#11771)

I'm more interested in the config line which enforces this: I want unused ignores to be reported, because it's useful feedback when annotating to know that you've fixed a problem you previously had to ignore.

* Install extras before typechecking

Lacking an easy way to install all extras generically, let's bite the bullet and install the hand-maintained `all` extra before typechecking, now that https://github.com/matrix-org/backend-meta/pull/6 has been merged to the release/v1 branch.
parent 8a23bde823
commit 6463244375

21 changed files with 60 additions and 57 deletions
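Two notes on the commit message. First, the "config line which enforces this" is mypy's `warn_unused_ignores = True` (equivalently `--warn-unused-ignores` on the command line), which makes mypy report any `# type: ignore` that no longer suppresses a real error. Second, the kept `select.epoll` ignore follows the platform-dependent pattern sketched below; this is an illustration of the shape of the problem, not the actual Synapse module:

```python
# Sketch: typeshed marks select.epoll as Linux-only. On Linux this ignore is
# "unused" (the import typechecks anyway); on macOS or Windows it suppresses
# a genuine attr-defined error. Dropping it would break typechecking for
# not-Linux development, which is why this one ignore was kept.
from select import epoll  # type: ignore[attr-defined]

poller = epoll()  # only works on Linux at runtime
poller.close()
```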
synapse/app/_base.py
@@ -48,7 +48,6 @@ from twisted.logger import LoggingFile, LogLevel
 from twisted.protocols.tls import TLSMemoryBIOFactory
 from twisted.python.threadpool import ThreadPool
 
-import synapse
 from synapse.api.constants import MAX_PDU_SIZE
 from synapse.app import check_bind_error
 from synapse.app.phone_stats_home import start_phone_stats_home
@@ -60,6 +59,7 @@ from synapse.events.spamcheck import load_legacy_spam_checkers
 from synapse.events.third_party_rules import load_legacy_third_party_event_rules
 from synapse.handlers.auth import load_legacy_password_auth_providers
 from synapse.logging.context import PreserveLoggingContext
+from synapse.logging.opentracing import init_tracer
 from synapse.metrics import install_gc_manager, register_threadpool
 from synapse.metrics.background_process_metrics import wrap_as_background_process
 from synapse.metrics.jemalloc import setup_jemalloc_stats
@@ -431,7 +431,7 @@ async def start(hs: "HomeServer") -> None:
     refresh_certificate(hs)
 
     # Start the tracer
-    synapse.logging.opentracing.init_tracer(hs)  # type: ignore[attr-defined] # noqa
+    init_tracer(hs)  # noqa
 
     # Instantiate the modules so they can register their web resources to the module API
     # before we start the listeners.
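On the `_base.py` hunks above: the old code reached `init_tracer` through the attribute path `synapse.logging.opentracing` while only `import synapse` was in scope. mypy flags that as `[attr-defined]` because importing a package does not import its submodules; at runtime it only worked because other modules had already imported `synapse.logging.opentracing`. Importing the name directly removes the need for the ignore. A standard-library sketch of the same trap, not Synapse code:

```python
# `import xml` does not import xml.dom.minidom, so reaching it through the
# package fails mypy ([attr-defined]) and, in a fresh interpreter, fails at
# runtime too.
import xml

try:
    xml.dom.minidom.parseString("<a/>")  # type: ignore[attr-defined]
except AttributeError as err:
    print(err)

# The direct import is safe for both the typechecker and the runtime.
from xml.dom.minidom import parseString

print(parseString("<a/>").toxml())
```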
synapse/config/server.py
@@ -186,7 +186,7 @@ KNOWN_RESOURCES = {
 class HttpResourceConfig:
     names: List[str] = attr.ib(
         factory=list,
-        validator=attr.validators.deep_iterable(attr.validators.in_(KNOWN_RESOURCES)),  # type: ignore
+        validator=attr.validators.deep_iterable(attr.validators.in_(KNOWN_RESOURCES)),
     )
     compress: bool = attr.ib(
         default=False,
@@ -231,9 +231,7 @@ class ManholeConfig:
 class LimitRemoteRoomsConfig:
     enabled: bool = attr.ib(validator=attr.validators.instance_of(bool), default=False)
     complexity: Union[float, int] = attr.ib(
-        validator=attr.validators.instance_of(
-            (float, int)  # type: ignore[arg-type] # noqa
-        ),
+        validator=attr.validators.instance_of((float, int)),  # noqa
         default=1.0,
     )
     complexity_error: str = attr.ib(
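The `LimitRemoteRoomsConfig` change collapses the validator onto one line because current attrs stubs accept a tuple of types for `attr.validators.instance_of`, mirroring `isinstance`, so the `[arg-type]` ignore is obsolete. A self-contained sketch of the pattern (an illustrative class, not the Synapse config):

```python
import attr

@attr.s(auto_attribs=True)
class Limits:
    # instance_of() with a tuple validates like isinstance(x, (float, int)).
    complexity: float = attr.ib(
        validator=attr.validators.instance_of((float, int)),
        default=1.0,
    )

print(Limits(complexity=2))  # ok: int is accepted by the validator
# Limits(complexity="2")     # would raise TypeError from the validator
```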
synapse/federation/federation_server.py
@@ -268,8 +268,8 @@ class FederationServer(FederationBase):
             transaction_id=transaction_id,
             destination=destination,
             origin=origin,
-            origin_server_ts=transaction_data.get("origin_server_ts"),  # type: ignore
-            pdus=transaction_data.get("pdus"),  # type: ignore
+            origin_server_ts=transaction_data.get("origin_server_ts"),  # type: ignore[arg-type]
+            pdus=transaction_data.get("pdus"),
             edus=transaction_data.get("edus"),
         )
 
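Note the asymmetry in the hunk above: the `pdus` ignore disappears entirely, while `origin_server_ts` is narrowed from a blanket `# type: ignore` to `# type: ignore[arg-type]`. The coded form is the better habit, as the toy lines below show, and it is what lets `warn_unused_ignores` give precise feedback later:

```python
# A bare ignore silences every error mypy could report on the line; a coded
# ignore silences only the named error, so unrelated new mistakes on the
# same line are still caught.
x: int = "oops"  # type: ignore              # hides anything, forever
y: int = "oops"  # type: ignore[assignment]  # hides exactly one known issue
```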
synapse/federation/transport/client.py
@@ -229,21 +229,21 @@ class TransportLayerClient:
         """
         logger.debug(
             "send_data dest=%s, txid=%s",
-            transaction.destination,  # type: ignore
-            transaction.transaction_id,  # type: ignore
+            transaction.destination,
+            transaction.transaction_id,
         )
 
-        if transaction.destination == self.server_name:  # type: ignore
+        if transaction.destination == self.server_name:
             raise RuntimeError("Transport layer cannot send to itself!")
 
         # FIXME: This is only used by the tests. The actual json sent is
         # generated by the json_data_callback.
         json_data = transaction.get_dict()
 
-        path = _create_v1_path("/send/%s", transaction.transaction_id)  # type: ignore
+        path = _create_v1_path("/send/%s", transaction.transaction_id)
 
         return await self.client.put_json(
-            transaction.destination,  # type: ignore
+            transaction.destination,
             path=path,
             data=json_data,
             json_data_callback=json_data_callback,
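Every ignore dropped in `TransportLayerClient` guarded attribute access on `Transaction`, whose fields mypy previously could not see; once the class declares its attributes, the suppressions become dead weight. A before/after sketch of that general situation (illustrative classes, not Synapse's actual `Transaction`):

```python
import attr

# Before: attributes assigned dynamically are invisible to mypy, so every
# access such as `t.destination` needs a # type: ignore[attr-defined].
class DynamicTransaction:
    def __init__(self, **kwargs: object) -> None:
        for key, value in kwargs.items():
            setattr(self, key, value)

# After: declared attributes typecheck with no suppression at all.
@attr.s(auto_attribs=True, slots=True)
class DeclaredTransaction:
    destination: str
    transaction_id: str

t = DeclaredTransaction(destination="example.org", transaction_id="txn-1")
print(t.destination, t.transaction_id)
```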
synapse/handlers/auth.py
@@ -481,7 +481,7 @@ class AuthHandler:
         sid = authdict["session"]
 
         # Convert the URI and method to strings.
-        uri = request.uri.decode("utf-8")  # type: ignore
+        uri = request.uri.decode("utf-8")
         method = request.method.decode("utf-8")
 
         # If there's no session ID, create a new session.
synapse/handlers/oidc.py
@@ -966,7 +966,7 @@ class OidcProvider:
                 "Mapping provider does not support de-duplicating Matrix IDs"
             )
 
-        attributes = await self._user_mapping_provider.map_user_attributes(  # type: ignore
+        attributes = await self._user_mapping_provider.map_user_attributes(
            userinfo, token
         )
 
synapse/handlers/search.py
@@ -357,7 +357,7 @@ class SearchHandler:
             itertools.chain(
                 # The events_before and events_after for each context.
                 itertools.chain.from_iterable(
-                    itertools.chain(context["events_before"], context["events_after"])  # type: ignore[arg-type]
+                    itertools.chain(context["events_before"], context["events_after"])
                     for context in contexts.values()
                 ),
                 # The returned events.
@@ -373,10 +373,10 @@ class SearchHandler:
 
         for context in contexts.values():
             context["events_before"] = self._event_serializer.serialize_events(
-                context["events_before"], time_now, bundle_aggregations=aggregations  # type: ignore[arg-type]
+                context["events_before"], time_now, bundle_aggregations=aggregations
             )
             context["events_after"] = self._event_serializer.serialize_events(
-                context["events_after"], time_now, bundle_aggregations=aggregations  # type: ignore[arg-type]
+                context["events_after"], time_now, bundle_aggregations=aggregations
             )
 
         results = [
synapse/http/server.py
@@ -295,7 +295,7 @@ class _AsyncResource(resource.Resource, metaclass=abc.ABCMeta):
         if isawaitable(raw_callback_return):
             callback_return = await raw_callback_return
         else:
-            callback_return = raw_callback_return  # type: ignore
+            callback_return = raw_callback_return
 
         return callback_return
 
@@ -469,7 +469,7 @@ class JsonResource(DirectServeJsonResource):
         if isinstance(raw_callback_return, (defer.Deferred, types.CoroutineType)):
             callback_return = await raw_callback_return
         else:
-            callback_return = raw_callback_return  # type: ignore
+            callback_return = raw_callback_return
 
         return callback_return
 
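Why could both else-branch ignores go? In newer typeshed, `inspect.isawaitable` is annotated as a `TypeGuard`, so the truthy branch narrows without help; combined with improved annotations on the callback return types, the else-branch assignment stopped producing an error. A sketch of the narrowing behaviour (illustrative, including the caveat that a `TypeGuard` only narrows the positive branch):

```python
import asyncio
from inspect import isawaitable
from typing import Awaitable, Union

async def resolve(value: Union[Awaitable[str], str]) -> str:
    if isawaitable(value):
        # typeshed's TypeGuard narrows this branch to Awaitable; no ignore.
        return await value
    # A TypeGuard does not narrow the negative branch, so be explicit here.
    assert isinstance(value, str)
    return value

async def main() -> None:
    print(await resolve("plain"))
    print(await resolve(asyncio.sleep(0, result="awaited")))

asyncio.run(main())
```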
synapse/module_api/__init__.py
@@ -109,6 +109,7 @@ from synapse.storage.state import StateFilter
 from synapse.types import (
     DomainSpecificString,
     JsonDict,
+    JsonMapping,
     Requester,
     StateMap,
     UserID,
@@ -151,6 +152,7 @@ __all__ = [
     "PRESENCE_ALL_USERS",
     "LoginResponse",
     "JsonDict",
+    "JsonMapping",
     "EventBase",
     "StateMap",
     "ProfileInfo",
@@ -1419,7 +1421,7 @@ class AccountDataManager:
             f"{user_id} is not local to this homeserver; can't access account data for remote users."
         )
 
-    async def get_global(self, user_id: str, data_type: str) -> Optional[JsonDict]:
+    async def get_global(self, user_id: str, data_type: str) -> Optional[JsonMapping]:
         """
         Gets some global account data, of a specified type, for the specified user.
 
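The `module_api` hunks are a behavioural tightening rather than an ignore removal: `get_global` now returns the read-only `JsonMapping` instead of the mutable `JsonDict`. Assuming the aliases are the usual `Dict[str, Any]`/`Mapping[str, Any]` pair from `synapse.types`, the point of the change looks like this:

```python
from typing import Any, Dict, Mapping

JsonDict = Dict[str, Any]        # mutable
JsonMapping = Mapping[str, Any]  # read-only view

def get_global(data_type: str) -> JsonMapping:
    cached: JsonDict = {"type": data_type, "content": {}}
    # Returning the internal dict as a Mapping is fine (a Dict is a Mapping),
    # but callers may no longer mutate the cache's copy under mypy.
    return cached

data = get_global("m.push_rules")
print(data["type"])
# data["type"] = "x"  # mypy error: Mapping has no __setitem__
```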
synapse/storage/databases/main/monthly_active_users.py
@@ -232,10 +232,10 @@ class MonthlyActiveUsersWorkerStore(RegistrationWorkerStore):
             # is racy.
             # Have resolved to invalidate the whole cache for now and do
             # something about it if and when the perf becomes significant
-            self._invalidate_all_cache_and_stream(  # type: ignore[attr-defined]
+            self._invalidate_all_cache_and_stream(
                 txn, self.user_last_seen_monthly_active
             )
-            self._invalidate_cache_and_stream(txn, self.get_monthly_active_count, ())  # type: ignore[attr-defined]
+            self._invalidate_cache_and_stream(txn, self.get_monthly_active_count, ())
 
         reserved_users = await self.get_registered_reserved_users()
         await self.db_pool.runInteraction(
@@ -363,7 +363,7 @@ class MonthlyActiveUsersWorkerStore(RegistrationWorkerStore):
 
         if self._limit_usage_by_mau or self._mau_stats_only:
             # Trial users and guests should not be included as part of MAU group
-            is_guest = await self.is_guest(user_id)  # type: ignore[attr-defined]
+            is_guest = await self.is_guest(user_id)
             if is_guest:
                 return
             is_trial = await self.is_trial_user(user_id)
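These `[attr-defined]` ignores existed because the invoked methods live on sibling store classes that are only combined in the final data-store class, so mypy could not resolve them from this class alone; once the dependency is visible in the class hierarchy, the ignores become unnecessary. A minimal sketch of that mixin situation and the explicit-inheritance fix (illustrative classes, not the real store hierarchy):

```python
class CacheInvalidationMixin:
    def invalidate(self, cache_name: str) -> None:
        print(f"invalidated {cache_name}")

# Before: a store that cannot see the mixin in its own bases must sprinkle
# # type: ignore[attr-defined] on every call to a mixin method.
# After: stating the dependency directly lets mypy resolve the call.
class MonthlyActiveUsersStore(CacheInvalidationMixin):
    def reap_users(self) -> None:
        self.invalidate("user_last_seen_monthly_active")  # no ignore needed

MonthlyActiveUsersStore().reap_users()
```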
synapse/storage/prepare_database.py
@@ -501,11 +501,11 @@ def _upgrade_existing_database(
 
             if hasattr(module, "run_create"):
                 logger.info("Running %s:run_create", relative_path)
-                module.run_create(cur, database_engine)  # type: ignore
+                module.run_create(cur, database_engine)
 
             if not is_empty and hasattr(module, "run_upgrade"):
                 logger.info("Running %s:run_upgrade", relative_path)
-                module.run_upgrade(cur, database_engine, config=config)  # type: ignore
+                module.run_upgrade(cur, database_engine, config=config)
         elif ext == ".pyc" or file_name == "__pycache__":
             # Sometimes .pyc files turn up anyway even though we've
             # disabled their generation; e.g. from distribution package
synapse/util/caches/ttlcache.py
@@ -107,7 +107,7 @@ class TTLCache(Generic[KT, VT]):
         self._metrics.inc_hits()
         return e.value, e.expiry_time, e.ttl
 
-    def pop(self, key: KT, default: T = SENTINEL) -> Union[VT, T]:  # type: ignore
+    def pop(self, key: KT, default: T = SENTINEL) -> Union[VT, T]:
         """Remove a value from the cache
 
         If key is in the cache, remove it and return its value, else return default.
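Finally, the `TTLCache.pop` signature uses the classic sentinel-default pattern: `SENTINEL` is a plain `object()`, which older mypy rejected as an incompatible default for a `T`-typed parameter, hence the ignore; the toolchain this PR moves to no longer complains, which is exactly the staleness `warn_unused_ignores` is meant to surface. A runnable sketch of the pattern (only the signature mirrors the diff; the body is illustrative):

```python
from typing import Dict, Generic, TypeVar, Union

KT = TypeVar("KT")
VT = TypeVar("VT")
T = TypeVar("T")

SENTINEL = object()  # distinguishes "no default given" from e.g. None

class TTLCache(Generic[KT, VT]):
    def __init__(self) -> None:
        self._data: Dict[KT, VT] = {}

    def pop(self, key: KT, default: T = SENTINEL) -> Union[VT, T]:
        """If key is in the cache, remove and return its value, else return default."""
        if key in self._data:
            return self._data.pop(key)
        return default

cache: TTLCache[str, int] = TTLCache()
cache._data["answer"] = 42
print(cache.pop("answer"))      # -> 42
print(cache.pop("answer", -1))  # -> -1
```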