
Remove unused `# type: ignore`s (#12531)

Over time we've begun to use newer versions of mypy, typeshed, stub
packages---and of course we've improved our own annotations. This makes
some type ignore comments no longer necessary. I have removed them.

There was one exception: a module that imports `select.epoll`. The
ignore is redundant on Linux, but I've kept it (and disabled the
unused-ignore warning for that module) for those of us who work on the
source tree on something other than Linux. (#11771)
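
For context, the pattern is roughly the sketch below (an illustration, not the
exact contents of `synapse/metrics/_reactor_metrics.py`): on Linux the typeshed
stubs know that `select.epoll` exists, so the ignore looks redundant there; on
macOS the attribute genuinely isn't present, so the ignore still earns its keep.

```python
# Illustrative sketch only: a platform-specific import that needs an ignore on
# macOS but not on Linux. typeshed marks select.epoll as Linux-only, so with
# warn_unused_ignores enabled, mypy on Linux would report this comment as unused.
from select import epoll  # type: ignore[attr-defined]
```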

I'm more interested in the config line that enforces this
(`warn_unused_ignores = True`). I want unused ignores to be reported,
because it's useful feedback when annotating to know when you've fixed a
problem you previously had to ignore.
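
To make that feedback loop concrete, here's a hedged sketch (hypothetical code,
not taken from Synapse) of what `warn_unused_ignores = True` catches: the
expression already type-checks, so the ignore suppresses nothing and mypy calls
it out.

```python
from typing import Dict, Optional


def lookup(d: Dict[str, int], key: str) -> Optional[int]:
    # Suppose an old stub once forced this ignore. The call now type-checks
    # cleanly, so with warn_unused_ignores = True mypy reports something like:
    #     error: Unused "type: ignore" comment
    return d.get(key)  # type: ignore
```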

* Installing extras before typechecking

Lacking an easy way to install all extras generically, let's bite the bullet and
have CI install the hand-maintained `all` extra before typechecking.

This is possible now that https://github.com/matrix-org/backend-meta/pull/6
has been merged to the release/v1 branch.
David Robertson, 2 years ago
commit 6463244375

+ 2 - 6
.github/workflows/tests.yml

@@ -20,13 +20,9 @@ jobs:
       - run: scripts-dev/config-lint.sh
 
   lint:
-    # This does a vanilla `poetry install` - no extras. I'm slightly anxious
-    # that we might skip some typechecks on code that uses extras. However,
-    # I think the right way to fix this is to mark any extras needed for
-    # typechecking as development dependencies. To detect this, we ought to
-    # turn up mypy's strictness: disallow unknown imports and accept fewer
-    # uses of `Any`.
     uses: "matrix-org/backend-meta/.github/workflows/python-poetry-ci.yml@v1"
+    with:
+      typechecking-extras: "all"
 
   lint-crlf:
     runs-on: ubuntu-latest

+ 1 - 0
changelog.d/12531.misc

@@ -0,0 +1 @@
+Remove unused `# type: ignore`s.

+ 6 - 0
mypy.ini

@@ -7,6 +7,7 @@ show_error_codes = True
 show_traceback = True
 mypy_path = stubs
 warn_unreachable = True
+warn_unused_ignores = True
 local_partial_types = True
 no_implicit_optional = True
 
@@ -134,6 +135,11 @@ disallow_untyped_defs = True
 [mypy-synapse.metrics.*]
 disallow_untyped_defs = True
 
+[mypy-synapse.metrics._reactor_metrics]
+# This module imports select.epoll. That exists on Linux, but doesn't on macOS.
+# See https://github.com/matrix-org/synapse/pull/11771.
+warn_unused_ignores = False
+
 [mypy-synapse.module_api.*]
 disallow_untyped_defs = True
 

+ 1 - 3
stubs/sortedcontainers/sorteddict.pyi

@@ -115,9 +115,7 @@ class SortedKeysView(KeysView[_KT_co], Sequence[_KT_co]):
     def __getitem__(self, index: slice) -> List[_KT_co]: ...
     def __delitem__(self, index: Union[int, slice]) -> None: ...
 
-class SortedItemsView(  # type: ignore
-    ItemsView[_KT_co, _VT_co], Sequence[Tuple[_KT_co, _VT_co]]
-):
+class SortedItemsView(ItemsView[_KT_co, _VT_co], Sequence[Tuple[_KT_co, _VT_co]]):
     def __iter__(self) -> Iterator[Tuple[_KT_co, _VT_co]]: ...
     @overload
     def __getitem__(self, index: int) -> Tuple[_KT_co, _VT_co]: ...

+ 2 - 2
synapse/app/_base.py

@@ -48,7 +48,6 @@ from twisted.logger import LoggingFile, LogLevel
 from twisted.protocols.tls import TLSMemoryBIOFactory
 from twisted.python.threadpool import ThreadPool
 
-import synapse
 from synapse.api.constants import MAX_PDU_SIZE
 from synapse.app import check_bind_error
 from synapse.app.phone_stats_home import start_phone_stats_home
@@ -60,6 +59,7 @@ from synapse.events.spamcheck import load_legacy_spam_checkers
 from synapse.events.third_party_rules import load_legacy_third_party_event_rules
 from synapse.handlers.auth import load_legacy_password_auth_providers
 from synapse.logging.context import PreserveLoggingContext
+from synapse.logging.opentracing import init_tracer
 from synapse.metrics import install_gc_manager, register_threadpool
 from synapse.metrics.background_process_metrics import wrap_as_background_process
 from synapse.metrics.jemalloc import setup_jemalloc_stats
@@ -431,7 +431,7 @@ async def start(hs: "HomeServer") -> None:
     refresh_certificate(hs)
 
     # Start the tracer
-    synapse.logging.opentracing.init_tracer(hs)  # type: ignore[attr-defined] # noqa
+    init_tracer(hs)  # noqa
 
     # Instantiate the modules so they can register their web resources to the module API
     # before we start the listeners.

+ 2 - 4
synapse/config/server.py

@@ -186,7 +186,7 @@ KNOWN_RESOURCES = {
 class HttpResourceConfig:
     names: List[str] = attr.ib(
         factory=list,
-        validator=attr.validators.deep_iterable(attr.validators.in_(KNOWN_RESOURCES)),  # type: ignore
+        validator=attr.validators.deep_iterable(attr.validators.in_(KNOWN_RESOURCES)),
     )
     compress: bool = attr.ib(
         default=False,
@@ -231,9 +231,7 @@ class ManholeConfig:
 class LimitRemoteRoomsConfig:
     enabled: bool = attr.ib(validator=attr.validators.instance_of(bool), default=False)
     complexity: Union[float, int] = attr.ib(
-        validator=attr.validators.instance_of(
-            (float, int)  # type: ignore[arg-type] # noqa
-        ),
+        validator=attr.validators.instance_of((float, int)),  # noqa
         default=1.0,
     )
     complexity_error: str = attr.ib(

+ 2 - 2
synapse/federation/federation_server.py

@@ -268,8 +268,8 @@ class FederationServer(FederationBase):
             transaction_id=transaction_id,
             destination=destination,
             origin=origin,
-            origin_server_ts=transaction_data.get("origin_server_ts"),  # type: ignore
-            pdus=transaction_data.get("pdus"),  # type: ignore
+            origin_server_ts=transaction_data.get("origin_server_ts"),  # type: ignore[arg-type]
+            pdus=transaction_data.get("pdus"),
             edus=transaction_data.get("edus"),
         )
 

+ 5 - 5
synapse/federation/transport/client.py

@@ -229,21 +229,21 @@ class TransportLayerClient:
         """
         logger.debug(
             "send_data dest=%s, txid=%s",
-            transaction.destination,  # type: ignore
-            transaction.transaction_id,  # type: ignore
+            transaction.destination,
+            transaction.transaction_id,
         )
 
-        if transaction.destination == self.server_name:  # type: ignore
+        if transaction.destination == self.server_name:
             raise RuntimeError("Transport layer cannot send to itself!")
 
         # FIXME: This is only used by the tests. The actual json sent is
         # generated by the json_data_callback.
         json_data = transaction.get_dict()
 
-        path = _create_v1_path("/send/%s", transaction.transaction_id)  # type: ignore
+        path = _create_v1_path("/send/%s", transaction.transaction_id)
 
         return await self.client.put_json(
-            transaction.destination,  # type: ignore
+            transaction.destination,
             path=path,
             data=json_data,
             json_data_callback=json_data_callback,

+ 1 - 1
synapse/handlers/auth.py

@@ -481,7 +481,7 @@ class AuthHandler:
             sid = authdict["session"]
 
         # Convert the URI and method to strings.
-        uri = request.uri.decode("utf-8")  # type: ignore
+        uri = request.uri.decode("utf-8")
         method = request.method.decode("utf-8")
 
         # If there's no session ID, create a new session.

+ 1 - 1
synapse/handlers/oidc.py

@@ -966,7 +966,7 @@ class OidcProvider:
                         "Mapping provider does not support de-duplicating Matrix IDs"
                     )
 
-                attributes = await self._user_mapping_provider.map_user_attributes(  # type: ignore
+                attributes = await self._user_mapping_provider.map_user_attributes(
                     userinfo, token
                 )
 

+ 3 - 3
synapse/handlers/search.py

@@ -357,7 +357,7 @@ class SearchHandler:
             itertools.chain(
                 # The events_before and events_after for each context.
                 itertools.chain.from_iterable(
-                    itertools.chain(context["events_before"], context["events_after"])  # type: ignore[arg-type]
+                    itertools.chain(context["events_before"], context["events_after"])
                     for context in contexts.values()
                 ),
                 # The returned events.
@@ -373,10 +373,10 @@ class SearchHandler:
 
         for context in contexts.values():
             context["events_before"] = self._event_serializer.serialize_events(
-                context["events_before"], time_now, bundle_aggregations=aggregations  # type: ignore[arg-type]
+                context["events_before"], time_now, bundle_aggregations=aggregations
             )
             context["events_after"] = self._event_serializer.serialize_events(
-                context["events_after"], time_now, bundle_aggregations=aggregations  # type: ignore[arg-type]
+                context["events_after"], time_now, bundle_aggregations=aggregations
             )
 
         results = [

+ 2 - 2
synapse/http/server.py

@@ -295,7 +295,7 @@ class _AsyncResource(resource.Resource, metaclass=abc.ABCMeta):
             if isawaitable(raw_callback_return):
                 callback_return = await raw_callback_return
             else:
-                callback_return = raw_callback_return  # type: ignore
+                callback_return = raw_callback_return
 
             return callback_return
 
@@ -469,7 +469,7 @@ class JsonResource(DirectServeJsonResource):
         if isinstance(raw_callback_return, (defer.Deferred, types.CoroutineType)):
             callback_return = await raw_callback_return
         else:
-            callback_return = raw_callback_return  # type: ignore
+            callback_return = raw_callback_return
 
         return callback_return
 

+ 3 - 1
synapse/module_api/__init__.py

@@ -109,6 +109,7 @@ from synapse.storage.state import StateFilter
 from synapse.types import (
     DomainSpecificString,
     JsonDict,
+    JsonMapping,
     Requester,
     StateMap,
     UserID,
@@ -151,6 +152,7 @@ __all__ = [
     "PRESENCE_ALL_USERS",
     "LoginResponse",
     "JsonDict",
+    "JsonMapping",
     "EventBase",
     "StateMap",
     "ProfileInfo",
@@ -1419,7 +1421,7 @@ class AccountDataManager:
                 f"{user_id} is not local to this homeserver; can't access account data for remote users."
             )
 
-    async def get_global(self, user_id: str, data_type: str) -> Optional[JsonDict]:
+    async def get_global(self, user_id: str, data_type: str) -> Optional[JsonMapping]:
         """
         Gets some global account data, of a specified type, for the specified user.
 

+ 3 - 3
synapse/storage/databases/main/monthly_active_users.py

@@ -232,10 +232,10 @@ class MonthlyActiveUsersWorkerStore(RegistrationWorkerStore):
             # is racy.
             # Have resolved to invalidate the whole cache for now and do
             # something about it if and when the perf becomes significant
-            self._invalidate_all_cache_and_stream(  # type: ignore[attr-defined]
+            self._invalidate_all_cache_and_stream(
                 txn, self.user_last_seen_monthly_active
             )
-            self._invalidate_cache_and_stream(txn, self.get_monthly_active_count, ())  # type: ignore[attr-defined]
+            self._invalidate_cache_and_stream(txn, self.get_monthly_active_count, ())
 
         reserved_users = await self.get_registered_reserved_users()
         await self.db_pool.runInteraction(
@@ -363,7 +363,7 @@ class MonthlyActiveUsersWorkerStore(RegistrationWorkerStore):
 
         if self._limit_usage_by_mau or self._mau_stats_only:
             # Trial users and guests should not be included as part of MAU group
-            is_guest = await self.is_guest(user_id)  # type: ignore[attr-defined]
+            is_guest = await self.is_guest(user_id)
             if is_guest:
                 return
             is_trial = await self.is_trial_user(user_id)

+ 2 - 2
synapse/storage/prepare_database.py

@@ -501,11 +501,11 @@ def _upgrade_existing_database(
 
                 if hasattr(module, "run_create"):
                     logger.info("Running %s:run_create", relative_path)
-                    module.run_create(cur, database_engine)  # type: ignore
+                    module.run_create(cur, database_engine)
 
                 if not is_empty and hasattr(module, "run_upgrade"):
                     logger.info("Running %s:run_upgrade", relative_path)
-                    module.run_upgrade(cur, database_engine, config=config)  # type: ignore
+                    module.run_upgrade(cur, database_engine, config=config)
             elif ext == ".pyc" or file_name == "__pycache__":
                 # Sometimes .pyc files turn up anyway even though we've
                 # disabled their generation; e.g. from distribution package

+ 1 - 1
synapse/util/caches/ttlcache.py

@@ -107,7 +107,7 @@ class TTLCache(Generic[KT, VT]):
         self._metrics.inc_hits()
         return e.value, e.expiry_time, e.ttl
 
-    def pop(self, key: KT, default: T = SENTINEL) -> Union[VT, T]:  # type: ignore
+    def pop(self, key: KT, default: T = SENTINEL) -> Union[VT, T]:
         """Remove a value from the cache
 
         If key is in the cache, remove it and return its value, else return default.

+ 3 - 6
tests/handlers/test_register.py

@@ -193,8 +193,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase):
 
     @override_config({"limit_usage_by_mau": True})
     def test_get_or_create_user_mau_not_blocked(self):
-        # Type ignore: mypy doesn't like us assigning to methods.
-        self.store.count_monthly_users = Mock(  # type: ignore[assignment]
+        self.store.count_monthly_users = Mock(
             return_value=make_awaitable(self.hs.config.server.max_mau_value - 1)
         )
         # Ensure does not throw exception
@@ -202,8 +201,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase):
 
     @override_config({"limit_usage_by_mau": True})
     def test_get_or_create_user_mau_blocked(self):
-        # Type ignore: mypy doesn't like us assigning to methods.
-        self.store.get_monthly_active_count = Mock(  # type: ignore[assignment]
+        self.store.get_monthly_active_count = Mock(
             return_value=make_awaitable(self.lots_of_users)
         )
         self.get_failure(
@@ -211,8 +209,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase):
             ResourceLimitError,
         )
 
-        # Type ignore: mypy doesn't like us assigning to methods.
-        self.store.get_monthly_active_count = Mock(  # type: ignore[assignment]
+        self.store.get_monthly_active_count = Mock(
             return_value=make_awaitable(self.hs.config.server.max_mau_value)
         )
         self.get_failure(

+ 11 - 6
tests/module_api/test_account_data_manager.py

@@ -11,8 +11,12 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from twisted.test.proto_helpers import MemoryReactor
+
 from synapse.api.errors import SynapseError
 from synapse.rest import admin
+from synapse.server import HomeServer
+from synapse.util import Clock
 
 from tests.unittest import HomeserverTestCase
 
@@ -22,7 +26,9 @@ class ModuleApiTestCase(HomeserverTestCase):
         admin.register_servlets,
     ]
 
-    def prepare(self, reactor, clock, homeserver) -> None:
+    def prepare(
+        self, reactor: MemoryReactor, clock: Clock, homeserver: HomeServer
+    ) -> None:
         self._store = homeserver.get_datastores().main
         self._module_api = homeserver.get_module_api()
         self._account_data_mgr = self._module_api.account_data_manager
@@ -91,7 +97,7 @@ class ModuleApiTestCase(HomeserverTestCase):
         )
         with self.assertRaises(TypeError):
             # This throws an exception because it's a frozen dict.
-            the_data["wombat"] = False
+            the_data["wombat"] = False  # type: ignore[index]
 
     def test_put_global(self) -> None:
         """
@@ -143,15 +149,14 @@ class ModuleApiTestCase(HomeserverTestCase):
         with self.assertRaises(TypeError):
             # The account data type must be a string.
             self.get_success_or_raise(
-                self._module_api.account_data_manager.put_global(
-                    self.user_id, 42, {}  # type: ignore
-                )
+                self._module_api.account_data_manager.put_global(self.user_id, 42, {})  # type: ignore[arg-type]
             )
 
         with self.assertRaises(TypeError):
             # The account data dict must be a dict.
+            # noinspection PyTypeChecker
             self.get_success_or_raise(
                 self._module_api.account_data_manager.put_global(
-                    self.user_id, "test.data", 42  # type: ignore
+                    self.user_id, "test.data", 42  # type: ignore[arg-type]
                 )
             )

+ 4 - 4
tests/replication/test_federation_sender_shard.py

@@ -102,8 +102,8 @@ class FederationSenderTestCase(BaseMultiWorkerStreamTestCase):
         for i in range(20):
             server_name = "other_server_%d" % (i,)
             room = self.create_room_with_remote_server(user, token, server_name)
-            mock_client1.reset_mock()  # type: ignore[attr-defined]
-            mock_client2.reset_mock()  # type: ignore[attr-defined]
+            mock_client1.reset_mock()
+            mock_client2.reset_mock()
 
             self.create_and_send_event(room, UserID.from_string(user))
             self.replicate()
@@ -167,8 +167,8 @@ class FederationSenderTestCase(BaseMultiWorkerStreamTestCase):
         for i in range(20):
             server_name = "other_server_%d" % (i,)
             room = self.create_room_with_remote_server(user, token, server_name)
-            mock_client1.reset_mock()  # type: ignore[attr-defined]
-            mock_client2.reset_mock()  # type: ignore[attr-defined]
+            mock_client1.reset_mock()
+            mock_client2.reset_mock()
 
             self.get_success(
                 typing_handler.started_typing(

+ 4 - 4
tests/test_utils/__init__.py

@@ -52,7 +52,7 @@ def make_awaitable(result: TV) -> Awaitable[TV]:
     This uses Futures as they can be awaited multiple times so can be returned
     to multiple callers.
     """
-    future = Future()  # type: ignore
+    future: Future[TV] = Future()
     future.set_result(result)
     return future
 
@@ -69,7 +69,7 @@ def setup_awaitable_errors() -> Callable[[], None]:
 
     # State shared between unraisablehook and check_for_unraisable_exceptions.
     unraisable_exceptions = []
-    orig_unraisablehook = sys.unraisablehook  # type: ignore
+    orig_unraisablehook = sys.unraisablehook
 
     def unraisablehook(unraisable):
         unraisable_exceptions.append(unraisable.exc_value)
@@ -78,11 +78,11 @@ def setup_awaitable_errors() -> Callable[[], None]:
         """
         A method to be used as a clean-up that fails a test-case if there are any new unraisable exceptions.
         """
-        sys.unraisablehook = orig_unraisablehook  # type: ignore
+        sys.unraisablehook = orig_unraisablehook
         if unraisable_exceptions:
             raise unraisable_exceptions.pop()
 
-    sys.unraisablehook = unraisablehook  # type: ignore
+    sys.unraisablehook = unraisablehook
 
     return cleanup
 

+ 1 - 1
tests/test_utils/logging_setup.py

@@ -27,7 +27,7 @@ class ToTwistedHandler(logging.Handler):
     def emit(self, record):
         log_entry = self.format(record)
         log_level = record.levelname.lower().replace("warning", "warn")
-        self.tx_log.emit(  # type: ignore
+        self.tx_log.emit(
             twisted.logger.LogLevel.levelWithName(log_level), "{entry}", entry=log_entry
         )