
Add stricter mypy options (#15694)

Enable warn_unused_configs, strict_concatenate, disallow_subclassing_any,
and disallow_incomplete_defs.
Patrick Cloke · 1 year ago · commit c01343de43
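
For context, warn_unused_configs reports [mypy-*] override sections that match no files, while the other newly enabled flags tighten checking of function and class definitions. A minimal sketch (not taken from the Synapse codebase) of what disallow_incomplete_defs reports:

def persist_event(event_id: str, depth) -> None:
    # disallow_untyped_defs already rejects fully unannotated functions;
    # disallow_incomplete_defs additionally rejects this partially annotated
    # one, because `depth` is left without a type.
    ...


def persist_event_ok(event_id: str, depth: int) -> None:
    # Fully annotated, so it passes both checks.
    ...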

+ 1 - 0
changelog.d/15694.misc

@@ -0,0 +1 @@
+Improve type hints.

+ 20 - 3
mypy.ini

@@ -2,17 +2,29 @@
 namespace_packages = True
 plugins = pydantic.mypy, mypy_zope:plugin, scripts-dev/mypy_synapse_plugin.py
 follow_imports = normal
-check_untyped_defs = True
 show_error_codes = True
 show_traceback = True
 mypy_path = stubs
 warn_unreachable = True
-warn_unused_ignores = True
 local_partial_types = True
 no_implicit_optional = True
+
+# Strict checks, see mypy --help
+warn_unused_configs = True
+# disallow_any_generics = True
+disallow_subclassing_any = True
+# disallow_untyped_calls = True
 disallow_untyped_defs = True
-strict_equality = True
+disallow_incomplete_defs = True
+# check_untyped_defs = True
+# disallow_untyped_decorators = True
 warn_redundant_casts = True
+warn_unused_ignores = True
+# warn_return_any = True
+# no_implicit_reexport = True
+strict_equality = True
+strict_concatenate = True
+
 # Run mypy type checking with the minimum supported Python version to catch new usage
 # that isn't backwards-compatible (types, overloads, etc).
 python_version = 3.8
@@ -31,6 +43,7 @@ warn_unused_ignores = False
 
 [mypy-synapse.util.caches.treecache]
 disallow_untyped_defs = False
+disallow_incomplete_defs = False
 
 ;; Dependencies without annotations
 ;; Before ignoring a module, check to see if type stubs are available.
@@ -40,6 +53,7 @@ disallow_untyped_defs = False
 ;; which we can pull in as a dev dependency by adding to `pyproject.toml`'s
 ;; `[tool.poetry.dev-dependencies]` list.
 
+# https://github.com/lepture/authlib/issues/460
 [mypy-authlib.*]
 ignore_missing_imports = True
 
@@ -49,9 +63,11 @@ ignore_missing_imports = True
 [mypy-lxml]
 ignore_missing_imports = True
 
+# https://github.com/msgpack/msgpack-python/issues/448
 [mypy-msgpack]
 ignore_missing_imports = True
 
+# https://github.com/wolever/parameterized/issues/143
 [mypy-parameterized.*]
 ignore_missing_imports = True
 
@@ -73,6 +89,7 @@ ignore_missing_imports = True
 [mypy-srvlookup.*]
 ignore_missing_imports = True
 
+# https://github.com/twisted/treq/pull/366
 [mypy-treq.*]
 ignore_missing_imports = True
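
Of the newly enabled options, strict_concatenate is the least self-explanatory: it makes parameters supplied through Concatenate behave as positional-only. A rough, hypothetical sketch (not Synapse code) of the kind of mismatch it reports:

from typing import Callable, TypeVar

from typing_extensions import Concatenate, ParamSpec

P = ParamSpec("P")
R = TypeVar("R")


class Connection:
    """Stand-in resource that the decorator injects."""


def with_connection(f: Callable[Concatenate[Connection, P], R]) -> Callable[P, R]:
    """Call `f` with a freshly created Connection prepended to its arguments."""

    def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
        return f(Connection(), *args, **kwargs)

    return wrapper


@with_connection
def fetch(conn: Connection, key: str) -> str:
    # With strict_concatenate enabled, mypy flags this decoration because
    # `conn` may also be passed by keyword, which the Concatenate-based
    # wrapper can never do; declaring it positional-only, as in
    # `def fetch(conn: Connection, /, key: str) -> str`, satisfies the check.
    return f"{key} via {conn!r}"


print(fetch("some-key"))  # runtime behaviour is unchanged; only the check is stricter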
 

+ 1 - 1
synapse/api/auth/msc3861_delegated.py

@@ -59,7 +59,7 @@ def scope_to_list(scope: str) -> List[str]:
     return scope.strip().split(" ")
 
 
-class PrivateKeyJWTWithKid(PrivateKeyJWT):
+class PrivateKeyJWTWithKid(PrivateKeyJWT):  # type: ignore[misc]
     """An implementation of the private_key_jwt client auth method that includes a kid header.
 
     This is needed because some providers (Keycloak) require the kid header to figure
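
The suppression added above is a direct consequence of disallow_subclassing_any: authlib is listed under ignore_missing_imports, so PrivateKeyJWT is typed as Any, and subclassing an Any base is now an error. A minimal, self-contained sketch of the same situation (the Any assignment stands in for an import from a stub-less package):

from typing import Any

# Stand-in for a class imported from a dependency without type stubs; under
# ignore_missing_imports, mypy sees such a class as Any.
UntypedBase: Any = object


class ClientAuthWithKid(UntypedBase):  # type: ignore[misc]
    # Without the suppression, disallow_subclassing_any reports an error along
    # the lines of: Class cannot subclass "UntypedBase" (has type "Any")  [misc]
    pass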

+ 2 - 2
synapse/federation/federation_server.py

@@ -515,7 +515,7 @@ class FederationServer(FederationBase):
                     logger.error(
                         "Failed to handle PDU %s",
                         event_id,
-                        exc_info=(f.type, f.value, f.getTracebackObject()),  # type: ignore
+                        exc_info=(f.type, f.value, f.getTracebackObject()),
                     )
                     return {"error": str(e)}
 
@@ -1247,7 +1247,7 @@ class FederationServer(FederationBase):
                     logger.error(
                         "Failed to handle PDU %s",
                         event.event_id,
-                        exc_info=(f.type, f.value, f.getTracebackObject()),  # type: ignore
+                        exc_info=(f.type, f.value, f.getTracebackObject()),
                     )
 
                 received_ts = await self.store.remove_received_event_from_staging(
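
The suppressions removed in this file (and in pagination.py, http/server.py and util/__init__.py below) all cover the same pattern, which is no longer rejected under the current type-checking setup: passing a Twisted Failure's (type, value, traceback) triple to a logger's exc_info argument. A minimal standalone sketch of that pattern, with a hypothetical logger and function name:

import logging

from twisted.python.failure import Failure

logger = logging.getLogger(__name__)


def log_pdu_failure(event_id: str, f: Failure) -> None:
    # logging's exc_info accepts a (type, value, traceback) triple; the tuple
    # built from a Failure is what previously carried per-line
    # `# type: ignore[arg-type]` comments.
    logger.error(
        "Failed to handle PDU %s",
        event_id,
        exc_info=(f.type, f.value, f.getTracebackObject()),
    )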

+ 1 - 1
synapse/handlers/oidc.py

@@ -1354,7 +1354,7 @@ class OidcProvider:
         finish_request(request)
 
 
-class LogoutToken(JWTClaims):
+class LogoutToken(JWTClaims):  # type: ignore[misc]
     """
     Holds and verify claims of a logout token, as per
     https://openid.net/specs/openid-connect-backchannel-1_0.html#LogoutToken

+ 2 - 2
synapse/handlers/pagination.py

@@ -360,7 +360,7 @@ class PaginationHandler:
         except Exception:
             f = Failure()
             logger.error(
-                "[purge] failed", exc_info=(f.type, f.value, f.getTracebackObject())  # type: ignore
+                "[purge] failed", exc_info=(f.type, f.value, f.getTracebackObject())
             )
             self._purges_by_id[purge_id].status = PurgeStatus.STATUS_FAILED
             self._purges_by_id[purge_id].error = f.getErrorMessage()
@@ -689,7 +689,7 @@ class PaginationHandler:
             f = Failure()
             logger.error(
                 "failed",
-                exc_info=(f.type, f.value, f.getTracebackObject()),  # type: ignore
+                exc_info=(f.type, f.value, f.getTracebackObject()),
             )
             self._delete_by_id[delete_id].status = DeleteStatus.STATUS_FAILED
             self._delete_by_id[delete_id].error = f.getErrorMessage()

+ 7 - 7
synapse/http/server.py

@@ -108,7 +108,7 @@ def return_json_error(
 
     if f.check(SynapseError):
         # mypy doesn't understand that f.check asserts the type.
-        exc: SynapseError = f.value  # type: ignore
+        exc: SynapseError = f.value
         error_code = exc.code
         error_dict = exc.error_dict(config)
         if exc.headers is not None:
@@ -124,7 +124,7 @@ def return_json_error(
                 "Got cancellation before client disconnection from %r: %r",
                 request.request_metrics.name,
                 request,
-                exc_info=(f.type, f.value, f.getTracebackObject()),  # type: ignore[arg-type]
+                exc_info=(f.type, f.value, f.getTracebackObject()),
             )
     else:
         error_code = 500
@@ -134,7 +134,7 @@ def return_json_error(
             "Failed handle request via %r: %r",
             request.request_metrics.name,
             request,
-            exc_info=(f.type, f.value, f.getTracebackObject()),  # type: ignore[arg-type]
+            exc_info=(f.type, f.value, f.getTracebackObject()),
         )
 
     # Only respond with an error response if we haven't already started writing,
@@ -172,7 +172,7 @@ def return_html_error(
     """
     if f.check(CodeMessageException):
         # mypy doesn't understand that f.check asserts the type.
-        cme: CodeMessageException = f.value  # type: ignore
+        cme: CodeMessageException = f.value
         code = cme.code
         msg = cme.msg
         if cme.headers is not None:
@@ -189,7 +189,7 @@ def return_html_error(
             logger.error(
                 "Failed handle request %r",
                 request,
-                exc_info=(f.type, f.value, f.getTracebackObject()),  # type: ignore[arg-type]
+                exc_info=(f.type, f.value, f.getTracebackObject()),
             )
     elif f.check(CancelledError):
         code = HTTP_STATUS_REQUEST_CANCELLED
@@ -199,7 +199,7 @@ def return_html_error(
             logger.error(
                 "Got cancellation before client disconnection when handling request %r",
                 request,
-                exc_info=(f.type, f.value, f.getTracebackObject()),  # type: ignore[arg-type]
+                exc_info=(f.type, f.value, f.getTracebackObject()),
             )
     else:
         code = HTTPStatus.INTERNAL_SERVER_ERROR
@@ -208,7 +208,7 @@ def return_html_error(
         logger.error(
             "Failed handle request %r",
             request,
-            exc_info=(f.type, f.value, f.getTracebackObject()),  # type: ignore[arg-type]
+            exc_info=(f.type, f.value, f.getTracebackObject()),
         )
 
     if isinstance(error_template, str):

+ 2 - 2
synapse/util/__init__.py

@@ -76,7 +76,7 @@ def unwrapFirstError(failure: Failure) -> Failure:
     # the subFailure's value, which will do a better job of preserving stacktraces.
     # (actually, you probably want to use yieldable_gather_results anyway)
     failure.trap(defer.FirstError)
-    return failure.value.subFailure  # type: ignore[union-attr]  # Issue in Twisted's annotations
+    return failure.value.subFailure
 
 
 P = ParamSpec("P")
@@ -178,7 +178,7 @@ def log_failure(
     """
 
     logger.error(
-        msg, exc_info=(failure.type, failure.value, failure.getTracebackObject())  # type: ignore[arg-type]
+        msg, exc_info=(failure.type, failure.value, failure.getTracebackObject())
     )
 
     if not consumeErrors:

+ 1 - 1
synapse/util/async_helpers.py

@@ -138,7 +138,7 @@ class ObservableDeferred(Generic[_T], AbstractObservableDeferred[_T]):
             for observer in observers:
                 # This is a little bit of magic to correctly propagate stack
                 # traces when we `await` on one of the observer deferreds.
-                f.value.__failure__ = f  # type: ignore[union-attr]
+                f.value.__failure__ = f
                 try:
                     observer.errback(f)
                 except Exception as e:

+ 2 - 4
synapse/util/caches/lrucache.py

@@ -93,10 +93,8 @@ VT = TypeVar("VT")
 # a general type var, distinct from either KT or VT
 T = TypeVar("T")
 
-P = TypeVar("P")
 
-
-class _TimedListNode(ListNode[P]):
+class _TimedListNode(ListNode[T]):
     """A `ListNode` that tracks last access time."""
 
     __slots__ = ["last_access_ts_secs"]
@@ -821,7 +819,7 @@ class AsyncLruCache(Generic[KT, VT]):
     utilize external cache systems that require await behaviour to be created.
     """
 
-    def __init__(self, *args, **kwargs):  # type: ignore
+    def __init__(self, *args: Any, **kwargs: Any):
         self._lru_cache: LruCache[KT, VT] = LruCache(*args, **kwargs)
 
     async def get(
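
The AsyncLruCache change above shows the idiomatic fix for pass-through wrappers under the stricter flags: annotate *args and **kwargs as Any instead of silencing the whole definition with a bare # type: ignore. A small hypothetical sketch of the same pattern:

from typing import Any


class TrackingDict:
    # Annotating the pass-through parameters keeps the definition "typed" for
    # disallow_untyped_defs and disallow_incomplete_defs; mypy treats an
    # __init__ with annotated parameters as implicitly returning None.
    def __init__(self, *args: Any, **kwargs: Any):
        self._inner = dict(*args, **kwargs)
        self.lookups = 0

    def get(self, key: Any, default: Any = None) -> Any:
        self.lookups += 1
        return self._inner.get(key, default)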

+ 1 - 1
tests/server.py

@@ -642,7 +642,7 @@ def _make_test_homeserver_synchronous(server: HomeServer) -> None:
         pool.runWithConnection = runWithConnection  # type: ignore[assignment]
         pool.runInteraction = runInteraction  # type: ignore[assignment]
         # Replace the thread pool with a threadless 'thread' pool
-        pool.threadpool = ThreadPool(clock._reactor)  # type: ignore[assignment]
+        pool.threadpool = ThreadPool(clock._reactor)
         pool.running = True
 
     # We've just changed the Databases to run DB transactions on the same