Remove unused # type: ignores (#12531)

Over time we've begun to use newer versions of mypy, typeshed and stub
packages, and of course we've improved our own annotations. This makes
some `# type: ignore` comments unnecessary, so I have removed them.

There was one exception: a module that imports `select.epoll`. The
ignore is redundant on Linux, but I've kept it for those of us who work
on the source tree on non-Linux machines. (#11771)
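
For context: `select.epoll` exists only on Linux, so the same line type-checks
differently depending on the platform. A minimal sketch of the situation
(simplified; not the verbatim Synapse source):

    # the shape of the import in synapse/metrics/_reactor_metrics.py
    from select import epoll  # type: ignore[attr-defined]

On macOS the stubs have no `epoll`, so the ignore suppresses a genuine
attr-defined error; on Linux it suppresses nothing and `warn_unused_ignores`
would flag it. Hence the per-module opt-out in mypy.ini further down.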

I'm more interested in the config line that enforces this. I want unused
ignores to be reported, because it's useful feedback while annotating to
know when you've fixed a problem you previously had to ignore.
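
To make that feedback loop concrete, here's what it looks like in a toy file
(not Synapse code; mypy's exact wording can vary by version):

    # demo.py (toy example; run: mypy --warn-unused-ignores demo.py)
    x: int = "five"  # type: ignore[assignment]  # needed: hides a real error
    y: int = 5  # type: ignore[assignment]  # unused: nothing to suppress here
    # mypy flags the second comment:
    #   demo.py:3: error: Unused "type: ignore" comment

Once the underlying problem is fixed, the stale suppression itself becomes an
error, so it can't silently linger.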

* Installing extras before typechecking

Lacking an easy way to install all extras generically, let's bite the bullet and
install the hand-maintained `all` extra before typechecking.

This is possible now that https://github.com/matrix-org/backend-meta/pull/6 has
been merged to the release/v1 branch.
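
For reference, the mechanics as I understand them (the extras listed are
illustrative; the real hand-maintained list lives in Synapse's pyproject.toml):

    # pyproject.toml (sketch)
    [tool.poetry.extras]
    all = ["psycopg2", "lxml"]  # hand-maintained union of every extra

    # locally, the equivalent of what CI now does before mypy:
    #   poetry install --extras all

The `typechecking-extras` input that the backend-meta PR exposes is what lets
this repository pass `all` through to the reusable workflow's install step.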
Commit 6463244375 (parent 8a23bde823), authored by David Robertson on
2022-04-27 14:03:44 +01:00 and committed by GitHub.
21 changed files with 60 additions and 57 deletions


@@ -20,13 +20,9 @@ jobs:
     - run: scripts-dev/config-lint.sh

   lint:
-    # This does a vanilla `poetry install` - no extras. I'm slightly anxious
-    # that we might skip some typechecks on code that uses extras. However,
-    # I think the right way to fix this is to mark any extras needed for
-    # typechecking as development dependencies. To detect this, we ought to
-    # turn up mypy's strictness: disallow unknown imports and be accept fewer
-    # uses of `Any`.
     uses: "matrix-org/backend-meta/.github/workflows/python-poetry-ci.yml@v1"
+    with:
+      typechecking-extras: "all"

   lint-crlf:
     runs-on: ubuntu-latest

changelog.d/12531.misc (new file)

@@ -0,0 +1 @@
+Remove unused `# type: ignore`s.


@@ -7,6 +7,7 @@ show_error_codes = True
 show_traceback = True
 mypy_path = stubs
 warn_unreachable = True
+warn_unused_ignores = True
 local_partial_types = True
 no_implicit_optional = True

@@ -134,6 +135,11 @@ disallow_untyped_defs = True
 [mypy-synapse.metrics.*]
 disallow_untyped_defs = True

+[mypy-synapse.metrics._reactor_metrics]
+# This module imports select.epoll. That exists on Linux, but doesn't on macOS.
+# See https://github.com/matrix-org/synapse/pull/11771.
+warn_unused_ignores = False
+
 [mypy-synapse.module_api.*]
 disallow_untyped_defs = True
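
If you want the same check ad hoc while tightening annotations locally, mypy's
standard flag does it without editing the config:

    mypy --warn-unused-ignores synapse/

Per-module sections override the global `[mypy]` default, which is how
`_reactor_metrics` above keeps its platform-dependent ignore while the rest of
the tree is held to the stricter rule.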


@@ -115,9 +115,7 @@ class SortedKeysView(KeysView[_KT_co], Sequence[_KT_co]):
     def __getitem__(self, index: slice) -> List[_KT_co]: ...
     def __delitem__(self, index: Union[int, slice]) -> None: ...

-class SortedItemsView(  # type: ignore
-    ItemsView[_KT_co, _VT_co], Sequence[Tuple[_KT_co, _VT_co]]
-):
+class SortedItemsView(ItemsView[_KT_co, _VT_co], Sequence[Tuple[_KT_co, _VT_co]]):
     def __iter__(self) -> Iterator[Tuple[_KT_co, _VT_co]]: ...
     @overload
     def __getitem__(self, index: int) -> Tuple[_KT_co, _VT_co]: ...


@@ -48,7 +48,6 @@ from twisted.logger import LoggingFile, LogLevel
 from twisted.protocols.tls import TLSMemoryBIOFactory
 from twisted.python.threadpool import ThreadPool

-import synapse
 from synapse.api.constants import MAX_PDU_SIZE
 from synapse.app import check_bind_error
 from synapse.app.phone_stats_home import start_phone_stats_home
@@ -60,6 +59,7 @@ from synapse.events.spamcheck import load_legacy_spam_checkers
 from synapse.events.third_party_rules import load_legacy_third_party_event_rules
 from synapse.handlers.auth import load_legacy_password_auth_providers
 from synapse.logging.context import PreserveLoggingContext
+from synapse.logging.opentracing import init_tracer
 from synapse.metrics import install_gc_manager, register_threadpool
 from synapse.metrics.background_process_metrics import wrap_as_background_process
 from synapse.metrics.jemalloc import setup_jemalloc_stats
@@ -431,7 +431,7 @@ async def start(hs: "HomeServer") -> None:
     refresh_certificate(hs)

     # Start the tracer
-    synapse.logging.opentracing.init_tracer(hs)  # type: ignore[attr-defined] # noqa
+    init_tracer(hs)  # noqa

     # Instantiate the modules so they can register their web resources to the module API
     # before we start the listeners.


@@ -186,7 +186,7 @@ KNOWN_RESOURCES = {
 class HttpResourceConfig:
     names: List[str] = attr.ib(
         factory=list,
-        validator=attr.validators.deep_iterable(attr.validators.in_(KNOWN_RESOURCES)),  # type: ignore
+        validator=attr.validators.deep_iterable(attr.validators.in_(KNOWN_RESOURCES)),
     )
     compress: bool = attr.ib(
         default=False,
@@ -231,9 +231,7 @@ class ManholeConfig:
 class LimitRemoteRoomsConfig:
     enabled: bool = attr.ib(validator=attr.validators.instance_of(bool), default=False)
     complexity: Union[float, int] = attr.ib(
-        validator=attr.validators.instance_of(
-            (float, int)  # type: ignore[arg-type] # noqa
-        ),
+        validator=attr.validators.instance_of((float, int)),  # noqa
         default=1.0,
     )
     complexity_error: str = attr.ib(
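
The line-wrapping in that second hunk existed only to hang the ignore on:
`attr.validators.instance_of` hands its argument straight to `isinstance`,
which accepts a tuple of types, and newer stubs appear to type it accordingly.
A self-contained sketch of the now-clean pattern (illustrative class, not
Synapse's):

    import attr

    @attr.s(auto_attribs=True)
    class LimitConfig:
        # tuple of types, isinstance-style: ints and floats both validate
        complexity: float = attr.ib(
            validator=attr.validators.instance_of((float, int)),
            default=1.0,
        )

    LimitConfig(complexity=2)    # passes validation
    LimitConfig(complexity="2")  # raises TypeError at construction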


@@ -268,8 +268,8 @@ class FederationServer(FederationBase):
             transaction_id=transaction_id,
             destination=destination,
             origin=origin,
-            origin_server_ts=transaction_data.get("origin_server_ts"),  # type: ignore
-            pdus=transaction_data.get("pdus"),  # type: ignore
+            origin_server_ts=transaction_data.get("origin_server_ts"),  # type: ignore[arg-type]
+            pdus=transaction_data.get("pdus"),
             edus=transaction_data.get("edus"),
         )


@@ -229,21 +229,21 @@ class TransportLayerClient:
         """
         logger.debug(
             "send_data dest=%s, txid=%s",
-            transaction.destination,  # type: ignore
-            transaction.transaction_id,  # type: ignore
+            transaction.destination,
+            transaction.transaction_id,
         )

-        if transaction.destination == self.server_name:  # type: ignore
+        if transaction.destination == self.server_name:
             raise RuntimeError("Transport layer cannot send to itself!")

         # FIXME: This is only used by the tests. The actual json sent is
         # generated by the json_data_callback.
         json_data = transaction.get_dict()

-        path = _create_v1_path("/send/%s", transaction.transaction_id)  # type: ignore
+        path = _create_v1_path("/send/%s", transaction.transaction_id)

         return await self.client.put_json(
-            transaction.destination,  # type: ignore
+            transaction.destination,
             path=path,
             data=json_data,
             json_data_callback=json_data_callback,


@@ -481,7 +481,7 @@ class AuthHandler:
         sid = authdict["session"]

         # Convert the URI and method to strings.
-        uri = request.uri.decode("utf-8")  # type: ignore
+        uri = request.uri.decode("utf-8")
         method = request.method.decode("utf-8")

         # If there's no session ID, create a new session.


@@ -966,7 +966,7 @@ class OidcProvider:
                 "Mapping provider does not support de-duplicating Matrix IDs"
             )

-        attributes = await self._user_mapping_provider.map_user_attributes(  # type: ignore
+        attributes = await self._user_mapping_provider.map_user_attributes(
             userinfo, token
         )


@@ -357,7 +357,7 @@ class SearchHandler:
             itertools.chain(
                 # The events_before and events_after for each context.
                 itertools.chain.from_iterable(
-                    itertools.chain(context["events_before"], context["events_after"])  # type: ignore[arg-type]
+                    itertools.chain(context["events_before"], context["events_after"])
                     for context in contexts.values()
                 ),
                 # The returned events.
@@ -373,10 +373,10 @@ class SearchHandler:
         for context in contexts.values():
             context["events_before"] = self._event_serializer.serialize_events(
-                context["events_before"], time_now, bundle_aggregations=aggregations  # type: ignore[arg-type]
+                context["events_before"], time_now, bundle_aggregations=aggregations
             )
             context["events_after"] = self._event_serializer.serialize_events(
-                context["events_after"], time_now, bundle_aggregations=aggregations  # type: ignore[arg-type]
+                context["events_after"], time_now, bundle_aggregations=aggregations
             )

         results = [


@@ -295,7 +295,7 @@ class _AsyncResource(resource.Resource, metaclass=abc.ABCMeta):
         if isawaitable(raw_callback_return):
             callback_return = await raw_callback_return
         else:
-            callback_return = raw_callback_return  # type: ignore
+            callback_return = raw_callback_return

         return callback_return

@@ -469,7 +469,7 @@ class JsonResource(DirectServeJsonResource):
         if isinstance(raw_callback_return, (defer.Deferred, types.CoroutineType)):
             callback_return = await raw_callback_return
         else:
-            callback_return = raw_callback_return  # type: ignore
+            callback_return = raw_callback_return

         return callback_return


@@ -109,6 +109,7 @@ from synapse.storage.state import StateFilter
 from synapse.types import (
     DomainSpecificString,
     JsonDict,
+    JsonMapping,
     Requester,
     StateMap,
     UserID,
@@ -151,6 +152,7 @@ __all__ = [
     "PRESENCE_ALL_USERS",
     "LoginResponse",
     "JsonDict",
+    "JsonMapping",
     "EventBase",
     "StateMap",
     "ProfileInfo",
@@ -1419,7 +1421,7 @@ class AccountDataManager:
                 f"{user_id} is not local to this homeserver; can't access account data for remote users."
             )

-    async def get_global(self, user_id: str, data_type: str) -> Optional[JsonDict]:
+    async def get_global(self, user_id: str, data_type: str) -> Optional[JsonMapping]:
         """
         Gets some global account data, of a specified type, for the specified user.
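
Note the knock-on effect of returning `JsonMapping` instead of `JsonDict`:
`Mapping` has no `__setitem__`, so mypy now rejects writes to the returned
account data, which is exactly why the test further down gains a
`# type: ignore[index]`. In miniature (illustrative code, not Synapse's):

    from typing import Mapping

    def get_data() -> Mapping[str, bool]:
        return {"wombat": True}

    data = get_data()
    data["wombat"] = False  # error: Unsupported target for indexed assignment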


@@ -232,10 +232,10 @@ class MonthlyActiveUsersWorkerStore(RegistrationWorkerStore):
                 # is racy.
                 # Have resolved to invalidate the whole cache for now and do
                 # something about it if and when the perf becomes significant
-                self._invalidate_all_cache_and_stream(  # type: ignore[attr-defined]
+                self._invalidate_all_cache_and_stream(
                     txn, self.user_last_seen_monthly_active
                 )
-                self._invalidate_cache_and_stream(txn, self.get_monthly_active_count, ())  # type: ignore[attr-defined]
+                self._invalidate_cache_and_stream(txn, self.get_monthly_active_count, ())

         reserved_users = await self.get_registered_reserved_users()
         await self.db_pool.runInteraction(
@@ -363,7 +363,7 @@ class MonthlyActiveUsersWorkerStore(RegistrationWorkerStore):
         if self._limit_usage_by_mau or self._mau_stats_only:
             # Trial users and guests should not be included as part of MAU group
-            is_guest = await self.is_guest(user_id)  # type: ignore[attr-defined]
+            is_guest = await self.is_guest(user_id)
             if is_guest:
                 return
             is_trial = await self.is_trial_user(user_id)


@@ -501,11 +501,11 @@ def _upgrade_existing_database(
                 if hasattr(module, "run_create"):
                     logger.info("Running %s:run_create", relative_path)
-                    module.run_create(cur, database_engine)  # type: ignore
+                    module.run_create(cur, database_engine)

                 if not is_empty and hasattr(module, "run_upgrade"):
                     logger.info("Running %s:run_upgrade", relative_path)
-                    module.run_upgrade(cur, database_engine, config=config)  # type: ignore
+                    module.run_upgrade(cur, database_engine, config=config)

             elif ext == ".pyc" or file_name == "__pycache__":
                 # Sometimes .pyc files turn up anyway even though we've
                 # disabled their generation; e.g. from distribution package


@@ -107,7 +107,7 @@ class TTLCache(Generic[KT, VT]):
         self._metrics.inc_hits()
         return e.value, e.expiry_time, e.ttl

-    def pop(self, key: KT, default: T = SENTINEL) -> Union[VT, T]:  # type: ignore
+    def pop(self, key: KT, default: T = SENTINEL) -> Union[VT, T]:
         """Remove a value from the cache

         If key is in the cache, remove it and return its value, else return default.


@@ -193,8 +193,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase):
     @override_config({"limit_usage_by_mau": True})
     def test_get_or_create_user_mau_not_blocked(self):
-        # Type ignore: mypy doesn't like us assigning to methods.
-        self.store.count_monthly_users = Mock(  # type: ignore[assignment]
+        self.store.count_monthly_users = Mock(
             return_value=make_awaitable(self.hs.config.server.max_mau_value - 1)
         )
         # Ensure does not throw exception
@@ -202,8 +201,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase):
     @override_config({"limit_usage_by_mau": True})
     def test_get_or_create_user_mau_blocked(self):
-        # Type ignore: mypy doesn't like us assigning to methods.
-        self.store.get_monthly_active_count = Mock(  # type: ignore[assignment]
+        self.store.get_monthly_active_count = Mock(
             return_value=make_awaitable(self.lots_of_users)
         )
         self.get_failure(
@@ -211,8 +209,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase):
             ResourceLimitError,
         )
-        # Type ignore: mypy doesn't like us assigning to methods.
-        self.store.get_monthly_active_count = Mock(  # type: ignore[assignment]
+        self.store.get_monthly_active_count = Mock(
             return_value=make_awaitable(self.hs.config.server.max_mau_value)
         )
         self.get_failure(


@@ -11,8 +11,12 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from twisted.test.proto_helpers import MemoryReactor
+
 from synapse.api.errors import SynapseError
 from synapse.rest import admin
+from synapse.server import HomeServer
+from synapse.util import Clock

 from tests.unittest import HomeserverTestCase
@@ -22,7 +26,9 @@ class ModuleApiTestCase(HomeserverTestCase):
         admin.register_servlets,
     ]

-    def prepare(self, reactor, clock, homeserver) -> None:
+    def prepare(
+        self, reactor: MemoryReactor, clock: Clock, homeserver: HomeServer
+    ) -> None:
         self._store = homeserver.get_datastores().main
         self._module_api = homeserver.get_module_api()
         self._account_data_mgr = self._module_api.account_data_manager
@@ -91,7 +97,7 @@ class ModuleApiTestCase(HomeserverTestCase):
         )
         with self.assertRaises(TypeError):
             # This throws an exception because it's a frozen dict.
-            the_data["wombat"] = False
+            the_data["wombat"] = False  # type: ignore[index]

     def test_put_global(self) -> None:
         """
@@ -143,15 +149,14 @@ class ModuleApiTestCase(HomeserverTestCase):
         with self.assertRaises(TypeError):
             # The account data type must be a string.
             self.get_success_or_raise(
-                self._module_api.account_data_manager.put_global(
-                    self.user_id, 42, {}  # type: ignore
-                )
+                self._module_api.account_data_manager.put_global(self.user_id, 42, {})  # type: ignore[arg-type]
             )

         with self.assertRaises(TypeError):
             # The account data dict must be a dict.
-            # noinspection PyTypeChecker
             self.get_success_or_raise(
                 self._module_api.account_data_manager.put_global(
-                    self.user_id, "test.data", 42  # type: ignore
+                    self.user_id, "test.data", 42  # type: ignore[arg-type]
                 )
             )


@@ -102,8 +102,8 @@ class FederationSenderTestCase(BaseMultiWorkerStreamTestCase):
         for i in range(20):
             server_name = "other_server_%d" % (i,)
             room = self.create_room_with_remote_server(user, token, server_name)
-            mock_client1.reset_mock()  # type: ignore[attr-defined]
-            mock_client2.reset_mock()  # type: ignore[attr-defined]
+            mock_client1.reset_mock()
+            mock_client2.reset_mock()

             self.create_and_send_event(room, UserID.from_string(user))
             self.replicate()
@@ -167,8 +167,8 @@ class FederationSenderTestCase(BaseMultiWorkerStreamTestCase):
         for i in range(20):
             server_name = "other_server_%d" % (i,)
             room = self.create_room_with_remote_server(user, token, server_name)
-            mock_client1.reset_mock()  # type: ignore[attr-defined]
-            mock_client2.reset_mock()  # type: ignore[attr-defined]
+            mock_client1.reset_mock()
+            mock_client2.reset_mock()

             self.get_success(
                 typing_handler.started_typing(


@@ -52,7 +52,7 @@ def make_awaitable(result: TV) -> Awaitable[TV]:
     This uses Futures as they can be awaited multiple times so can be returned
     to multiple callers.
     """
-    future = Future()  # type: ignore
+    future: Future[TV] = Future()
     future.set_result(result)
     return future

@@ -69,7 +69,7 @@ def setup_awaitable_errors() -> Callable[[], None]:
     # State shared between unraisablehook and check_for_unraisable_exceptions.
     unraisable_exceptions = []

-    orig_unraisablehook = sys.unraisablehook  # type: ignore
+    orig_unraisablehook = sys.unraisablehook

     def unraisablehook(unraisable):
         unraisable_exceptions.append(unraisable.exc_value)

@@ -78,11 +78,11 @@ def setup_awaitable_errors() -> Callable[[], None]:
         """
        A method to be used as a clean-up that fails a test-case if there are any new unraisable exceptions.
         """
-        sys.unraisablehook = orig_unraisablehook  # type: ignore
+        sys.unraisablehook = orig_unraisablehook

         if unraisable_exceptions:
             raise unraisable_exceptions.pop()

-    sys.unraisablehook = unraisablehook  # type: ignore
+    sys.unraisablehook = unraisablehook

     return cleanup
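
The `Future` fix above is the usual replacement for this kind of ignore:
supply the generic parameter by annotating the variable, rather than silencing
the "need type annotation" error. A toy example:

    from asyncio import Future

    future: Future[str] = Future()  # type parameter comes from the annotation
    future.set_result("done")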


@@ -27,7 +27,7 @@ class ToTwistedHandler(logging.Handler):
     def emit(self, record):
         log_entry = self.format(record)
         log_level = record.levelname.lower().replace("warning", "warn")
-        self.tx_log.emit(  # type: ignore
+        self.tx_log.emit(
             twisted.logger.LogLevel.levelWithName(log_level), "{entry}", entry=log_entry
         )