diff --git a/docs/conf.py b/docs/conf.py
index 5e0761c5..290f8baf 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -11,10 +11,13 @@
 #
 # All configuration values have a default; values that are commented out
 # serve to show the default.
+import logging as pylogging
 import os
 import os.path as osp
 import shutil

+from sphinx.util import logging  # type:ignore[import]
+
 # If extensions (or modules to document with autodoc) are in another directory,
 # add these directories to sys.path here. If the directory is relative to the
 # documentation root, use os.path.abspath to make it absolute, like shown here.
@@ -37,6 +40,16 @@
     "sphinx_autodoc_typehints",
 ]

+
+# Workaround for https://github.com/agronholm/sphinx-autodoc-typehints/issues/123
+class FilterForIssue123(pylogging.Filter):
+    def filter(self, record: pylogging.LogRecord) -> bool:
+        return not record.getMessage().startswith("Cannot handle as a local function")
+
+
+logging.getLogger("sphinx_autodoc_typehints").logger.addFilter(FilterForIssue123())
+# End of a workaround
+
 try:
     import enchant  # type:ignore[import]  # noqa
diff --git a/jupyter_client/client.py b/jupyter_client/client.py
index 20115fd2..a52cecc7 100644
--- a/jupyter_client/client.py
+++ b/jupyter_client/client.py
@@ -115,7 +115,11 @@ def _context_default(self) -> zmq.Context:

     def __del__(self):
         """Handle garbage collection. Destroy context if applicable."""
-        if self._created_context and self.context and not self.context.closed:
+        if (
+            self._created_context
+            and self.context is not None  # type:ignore[redundant-expr]
+            and not self.context.closed
+        ):
             if self.channels_running:
                 if self.log:
                     self.log.warning("Could not destroy zmq context for %s", self)
@@ -349,7 +353,9 @@ def shell_channel(self) -> t.Any:
             url = self._make_url("shell")
             self.log.debug("connecting shell channel to %s", url)
             socket = self.connect_shell(identity=self.session.bsession)
-            self._shell_channel = self.shell_channel_class(socket, self.session, self.ioloop)
+            self._shell_channel = self.shell_channel_class(
+                socket, self.session, self.ioloop
+            )  # type:ignore[operator]
         return self._shell_channel

     @property
@@ -359,7 +365,9 @@ def iopub_channel(self) -> t.Any:
             url = self._make_url("iopub")
             self.log.debug("connecting iopub channel to %s", url)
             socket = self.connect_iopub()
-            self._iopub_channel = self.iopub_channel_class(socket, self.session, self.ioloop)
+            self._iopub_channel = self.iopub_channel_class(
+                socket, self.session, self.ioloop
+            )  # type:ignore[operator]
         return self._iopub_channel

     @property
@@ -369,7 +377,9 @@ def stdin_channel(self) -> t.Any:
             url = self._make_url("stdin")
             self.log.debug("connecting stdin channel to %s", url)
             socket = self.connect_stdin(identity=self.session.bsession)
-            self._stdin_channel = self.stdin_channel_class(socket, self.session, self.ioloop)
+            self._stdin_channel = self.stdin_channel_class(
+                socket, self.session, self.ioloop
+            )  # type:ignore[operator]
         return self._stdin_channel

     @property
@@ -378,7 +388,9 @@ def hb_channel(self) -> t.Any:
         if self._hb_channel is None:
             url = self._make_url("hb")
             self.log.debug("connecting heartbeat channel to %s", url)
-            self._hb_channel = self.hb_channel_class(self.context, self.session, url)
+            self._hb_channel = self.hb_channel_class(
+                self.context, self.session, url
+            )  # type:ignore[operator]
         return self._hb_channel

     @property
@@ -388,7 +400,9 @@ def control_channel(self) -> t.Any:
             url = self._make_url("control")
             self.log.debug("connecting control channel to %s", url)
             socket = self.connect_control(identity=self.session.bsession)
-            self._control_channel = self.control_channel_class(socket, self.session, self.ioloop)
+            self._control_channel = self.control_channel_class(
+                socket, self.session, self.ioloop
+            )  # type:ignore[operator]
         return self._control_channel

     async def _async_is_alive(self) -> bool:
diff --git a/jupyter_client/connect.py b/jupyter_client/connect.py
index 6ebec00c..74e46737 100644
--- a/jupyter_client/connect.py
+++ b/jupyter_client/connect.py
@@ -341,7 +341,7 @@ def _data_dir_default(self):
         to the Kernel, so be careful!""",
     )

-    def _ip_default(self):
+    def _ip_default(self) -> str:
         if self.transport == "ipc":
             if self.connection_file:
                 return os.path.splitext(self.connection_file)[0] + "-ipc"
@@ -426,7 +426,7 @@ def get_connection_info(self, session: bool = False) -> KernelConnectionInfo:
     def blocking_client(self):
         """Make a blocking client connected to my kernel"""
         info = self.get_connection_info()
-        bc = self.blocking_class(parent=self)
+        bc = self.blocking_class(parent=self)  # type:ignore[operator]
         bc.load_connection_info(info)
         return bc

@@ -540,7 +540,7 @@ def load_connection_info(self, info: KernelConnectionInfo) -> None:
         See the connection_file spec for details.
         """
         self.transport = info.get("transport", self.transport)
-        self.ip = info.get("ip", self._ip_default())
+        self.ip = info.get("ip", self._ip_default())  # type:ignore[assignment]

         self._record_random_port_names()
         for name in port_names:
diff --git a/jupyter_client/consoleapp.py b/jupyter_client/consoleapp.py
index 2d06ed07..d4288bec 100644
--- a/jupyter_client/consoleapp.py
+++ b/jupyter_client/consoleapp.py
@@ -20,6 +20,7 @@
 from . import KernelManager, connect, find_connection_file, tunnel_to_kernel
 from .blocking import BlockingKernelClient
+from .connect import KernelConnectionInfo
 from .kernelspec import NoSuchKernel
 from .localinterfaces import localhost
 from .restarter import KernelRestarter
@@ -234,7 +235,7 @@ def init_ssh(self) -> None:
             ip = localhost()

         # build connection dict for tunnels:
-        info = {
+        info: KernelConnectionInfo = {
             "ip": ip,
             "shell_port": self.shell_port,
             "iopub_port": self.iopub_port,
@@ -293,7 +294,7 @@ def init_kernel_manager(self) -> None:

         # Create a KernelManager and start a kernel.
         try:
-            self.kernel_manager = self.kernel_manager_class(
+            self.kernel_manager = self.kernel_manager_class(  # type:ignore[operator]
                 ip=self.ip,
                 session=self.session,
                 transport=self.transport,
diff --git a/jupyter_client/ioloop/manager.py b/jupyter_client/ioloop/manager.py
index a3f07211..5b5c3dc4 100644
--- a/jupyter_client/ioloop/manager.py
+++ b/jupyter_client/ioloop/manager.py
@@ -56,7 +56,7 @@ def start_restarter(self):
         """Start the restarter."""
         if self.autorestart and self.has_kernel:
             if self._restarter is None:
-                self._restarter = self.restarter_class(
+                self._restarter = self.restarter_class(  # type:ignore[operator]
                     kernel_manager=self, loop=self.loop, parent=self, log=self.log
                 )
             self._restarter.start()
@@ -99,7 +99,7 @@ def start_restarter(self):
         """Start the restarter."""
         if self.autorestart and self.has_kernel:
             if self._restarter is None:
-                self._restarter = self.restarter_class(
+                self._restarter = self.restarter_class(  # type:ignore[operator]
                     kernel_manager=self, loop=self.loop, parent=self, log=self.log
                 )
             self._restarter.start()
diff --git a/jupyter_client/kernelspec.py b/jupyter_client/kernelspec.py
index 26c36865..3ba91367 100644
--- a/jupyter_client/kernelspec.py
+++ b/jupyter_client/kernelspec.py
@@ -238,9 +238,13 @@ def _get_kernel_spec_by_name(self, kernel_name, resource_dir):
                 pass
             else:
                 if resource_dir == RESOURCES:
-                    kspec = self.kernel_spec_class(resource_dir=resource_dir, **get_kernel_dict())
+                    kspec = self.kernel_spec_class(
+                        resource_dir=resource_dir, **get_kernel_dict()
+                    )  # type:ignore[operator]
         if not kspec:
-            kspec = self.kernel_spec_class.from_resource_dir(resource_dir)
+            kspec = self.kernel_spec_class.from_resource_dir(  # type:ignore[attr-defined]
+                resource_dir
+            )

         if not KPF.instance(parent=self.parent).is_provisioner_available(kspec):
             raise NoSuchKernel(kernel_name)
diff --git a/jupyter_client/kernelspecapp.py b/jupyter_client/kernelspecapp.py
index e9b14703..b97530e0 100644
--- a/jupyter_client/kernelspecapp.py
+++ b/jupyter_client/kernelspecapp.py
@@ -115,7 +115,7 @@ def _kernel_name_default(self):
         "name": "InstallKernelSpec.kernel_name",
         "prefix": "InstallKernelSpec.prefix",
     }
-    aliases.update(base_aliases)
+    aliases.update(base_aliases)  # type:ignore[arg-type]

     flags = {
         "user": (
@@ -185,7 +185,7 @@ def _kernel_spec_manager_default(self):
     flags = {
         "f": ({"RemoveKernelSpec": {"force": True}}, force.help),
     }
-    flags.update(JupyterApp.flags)
+    flags.update(JupyterApp.flags)  # type:ignore[has-type]

     def parse_command_line(self, argv):
         """Parse the command line args."""
diff --git a/jupyter_client/manager.py b/jupyter_client/manager.py
index 41d63140..598f49f2 100644
--- a/jupyter_client/manager.py
+++ b/jupyter_client/manager.py
@@ -117,7 +117,7 @@ def __init__(self, *args, **kwargs):
     # The PyZMQ Context to use for communication with the kernel.
     context: Instance = Instance(zmq.Context)

-    @default("context")  # type:ignore[misc]
+    @default("context")
     def _context_default(self) -> zmq.Context:
         self._created_context = True
         return zmq.Context()
@@ -128,11 +128,11 @@ def _context_default(self) -> zmq.Context:
     )
     client_factory: Type = Type(klass=KernelClient)

-    @default("client_factory")  # type:ignore[misc]
+    @default("client_factory")
     def _client_factory_default(self) -> Type:
         return import_item(self.client_class)

-    @observe("client_class")  # type:ignore[misc]
+    @observe("client_class")
     def _client_class_changed(self, change: t.Dict[str, DottedObjectName]) -> None:
         self.client_factory = import_item(str(change["new"]))

@@ -145,11 +145,11 @@ def _client_class_changed(self, change: t.Dict[str, DottedObjectName]) -> None:

     kernel_spec_manager: Instance = Instance(kernelspec.KernelSpecManager)

-    @default("kernel_spec_manager")  # type:ignore[misc]
+    @default("kernel_spec_manager")
     def _kernel_spec_manager_default(self) -> kernelspec.KernelSpecManager:
         return kernelspec.KernelSpecManager(data_dir=self.data_dir)

-    @observe("kernel_spec_manager")  # type:ignore[misc]
+    @observe("kernel_spec_manager")
     @observe_compat  # type:ignore[misc]
     def _kernel_spec_manager_changed(self, change: t.Dict[str, Instance]) -> None:
         self._kernel_spec = None
@@ -170,7 +170,7 @@ def _kernel_spec_manager_changed(self, change: t.Dict[str, Instance]) -> None:

     kernel_name: t.Union[str, Unicode] = Unicode(kernelspec.NATIVE_KERNEL_NAME)

-    @observe("kernel_name")  # type:ignore[misc]
+    @observe("kernel_name")
     def _kernel_name_changed(self, change: t.Dict[str, str]) -> None:
         self._kernel_spec = None
         if change["new"] == "python":
@@ -190,7 +190,7 @@ def kernel_spec(self) -> t.Optional[kernelspec.KernelSpec]:
         help="True if the MultiKernelManager should cache ports for this KernelManager instance",
     )

-    @default("cache_ports")  # type:ignore[misc]
+    @default("cache_ports")
     def _default_cache_ports(self) -> bool:
         return self.transport == "tcp"

@@ -688,7 +688,7 @@ class AsyncKernelManager(KernelManager):
     # The PyZMQ Context to use for communication with the kernel.
     context: Instance = Instance(zmq.asyncio.Context)

-    @default("context")  # type:ignore[misc]
+    @default("context")
     def _context_default(self) -> zmq.asyncio.Context:
         self._created_context = True
         return zmq.asyncio.Context()
diff --git a/jupyter_client/multikernelmanager.py b/jupyter_client/multikernelmanager.py
index 36031f68..2ebd0e9d 100644
--- a/jupyter_client/multikernelmanager.py
+++ b/jupyter_client/multikernelmanager.py
@@ -102,7 +102,7 @@ def _starting_kernels(self):
         """A shim for backwards compatibility."""
         return self._pending_kernels

-    @default("context")  # type:ignore[misc]
+    @default("context")
     def _context_default(self) -> zmq.Context:
         self._created_context = True
         return zmq.Context()
@@ -602,7 +602,7 @@ class AsyncMultiKernelManager(MultiKernelManager):

     context = Instance("zmq.asyncio.Context")

-    @default("context")  # type:ignore[misc]
+    @default("context")
     def _context_default(self) -> zmq.asyncio.Context:
         self._created_context = True
         return zmq.asyncio.Context()
diff --git a/jupyter_client/runapp.py b/jupyter_client/runapp.py
index fb115852..9013f25b 100644
--- a/jupyter_client/runapp.py
+++ b/jupyter_client/runapp.py
@@ -35,7 +35,7 @@
 frontend_flags = set(frontend_flags_dict.keys())


-class RunApp(JupyterApp, JupyterConsoleApp):
+class RunApp(JupyterApp, JupyterConsoleApp):  # type:ignore[misc]
     """An Jupyter Console app to run files."""

     version = __version__
diff --git a/jupyter_client/session.py b/jupyter_client/session.py
index 8dbb364c..16373ce3 100644
--- a/jupyter_client/session.py
+++ b/jupyter_client/session.py
@@ -10,6 +10,8 @@
 """
 # Copyright (c) Jupyter Development Team.
 # Distributed under the terms of the Modified BSD License.
+from __future__ import annotations
+
 import hashlib
 import hmac
 import json
@@ -25,8 +27,6 @@
 from hmac import compare_digest

 # We are using compare_digest to limit the surface of timing attacks
-from typing import Optional, Union
-
 import zmq.asyncio
 from tornado.ioloop import IOLoop
 from traitlets import (
@@ -212,7 +212,7 @@ class SessionFactory(LoggingConfigurable):

     logname = Unicode("")

-    @observe("logname")  # type:ignore[misc]
+    @observe("logname")
     def _logname_changed(self, change: t.Any) -> None:
         self.log = logging.getLogger(change["new"])

@@ -244,7 +244,7 @@ class Message:
     A Message can be created from a dict and a dict from a Message
     instance simply by calling dict(msg_obj)."""

-    def __init__(self, msg_dict: t.Dict[str, t.Any]) -> None:
+    def __init__(self, msg_dict: dict[str, t.Any]) -> None:
         """Initialize a message."""
         dct = self.__dict__
         for k, v in dict(msg_dict).items():
@@ -269,14 +269,16 @@ def __getitem__(self, k: str) -> t.Any:
         return self.__dict__[k]


-def msg_header(msg_id: str, msg_type: str, username: str, session: "Session") -> t.Dict[str, t.Any]:
+def msg_header(
+    msg_id: str, msg_type: str, username: str, session: Session | str
+) -> dict[str, t.Any]:
     """Create a new message header"""
     date = utcnow()
     version = protocol_version
     return locals()


-def extract_header(msg_or_header: t.Dict[str, t.Any]) -> t.Dict[str, t.Any]:
+def extract_header(msg_or_header: dict[str, t.Any]) -> dict[str, t.Any]:
     """Given a message or header, return the header."""
     if not msg_or_header:
         return {}
@@ -575,7 +577,7 @@ def __init__(self, **kwargs):
                 "Message signing is disabled. This is insecure and not recommended!"
             )

-    def clone(self) -> "Session":
+    def clone(self) -> Session:
         """Create a copy of this Session

         Useful when connecting multiple times to a given kernel.
@@ -640,18 +642,18 @@ def _check_packers(self) -> None:
             self.pack = lambda o: pack(squash_dates(o))
             self.unpack = lambda s: unpack(s)

-    def msg_header(self, msg_type: str) -> t.Dict[str, t.Any]:
+    def msg_header(self, msg_type: str) -> dict[str, t.Any]:
         """Create a header for a message type."""
         return msg_header(self.msg_id, msg_type, self.username, self.session)

     def msg(
         self,
         msg_type: str,
-        content: t.Optional[t.Dict] = None,
-        parent: t.Optional[t.Dict[str, t.Any]] = None,
-        header: t.Optional[t.Dict[str, t.Any]] = None,
-        metadata: t.Optional[t.Dict[str, t.Any]] = None,
-    ) -> t.Dict[str, t.Any]:
+        content: dict | None = None,
+        parent: dict[str, t.Any] | None = None,
+        header: dict[str, t.Any] | None = None,
+        metadata: dict[str, t.Any] | None = None,
+    ) -> dict[str, t.Any]:
         """Return the nested message dict.

         This format is different from what is sent over the wire. The
@@ -670,7 +672,7 @@ def msg(
             msg["metadata"].update(metadata)
         return msg

-    def sign(self, msg_list: t.List) -> bytes:
+    def sign(self, msg_list: list) -> bytes:
         """Sign a message with HMAC digest. If no auth, return b''.

         Parameters
@@ -687,9 +689,9 @@ def sign(self, msg_list: t.List) -> bytes:

     def serialize(
         self,
-        msg: t.Dict[str, t.Any],
-        ident: t.Optional[t.Union[t.List[bytes], bytes]] = None,
-    ) -> t.List[bytes]:
+        msg: dict[str, t.Any],
+        ident: list[bytes] | bytes | None = None,
+    ) -> list[bytes]:
         """Serialize the message components to bytes.

         This is roughly the inverse of deserialize. The serialize/deserialize
@@ -751,16 +753,16 @@ def serialize(

     def send(
         self,
-        stream: Optional[Union[zmq.sugar.socket.Socket, ZMQStream]],
-        msg_or_type: t.Union[t.Dict[str, t.Any], str],
-        content: t.Optional[t.Dict[str, t.Any]] = None,
-        parent: t.Optional[t.Dict[str, t.Any]] = None,
-        ident: t.Optional[t.Union[bytes, t.List[bytes]]] = None,
-        buffers: t.Optional[t.List[bytes]] = None,
+        stream: zmq.sugar.socket.Socket | ZMQStream | None,
+        msg_or_type: dict[str, t.Any] | str,
+        content: dict[str, t.Any] | None = None,
+        parent: dict[str, t.Any] | None = None,
+        ident: bytes | list[bytes] | None = None,
+        buffers: list[bytes] | None = None,
         track: bool = False,
-        header: t.Optional[t.Dict[str, t.Any]] = None,
-        metadata: t.Optional[t.Dict[str, t.Any]] = None,
-    ) -> t.Optional[t.Dict[str, t.Any]]:
+        header: dict[str, t.Any] | None = None,
+        metadata: dict[str, t.Any] | None = None,
+    ) -> dict[str, t.Any] | None:
         """Build and send a message via stream or socket.

         The message format used by this function internally is as follows:
@@ -872,10 +874,10 @@ def send(
     def send_raw(
         self,
         stream: zmq.sugar.socket.Socket,
-        msg_list: t.List,
+        msg_list: list,
         flags: int = 0,
         copy: bool = True,
-        ident: t.Optional[t.Union[bytes, t.List[bytes]]] = None,
+        ident: bytes | list[bytes] | None = None,
     ) -> None:
         """Send a raw message via ident path.

@@ -912,7 +914,7 @@ def recv(
         mode: int = zmq.NOBLOCK,
         content: bool = True,
         copy: bool = True,
-    ) -> t.Tuple[t.Optional[t.List[bytes]], t.Optional[t.Dict[str, t.Any]]]:
+    ) -> tuple[list[bytes] | None, dict[str, t.Any] | None]:
         """Receive and unpack a message.

         Parameters
@@ -950,8 +952,8 @@ def recv(
             raise e

     def feed_identities(
-        self, msg_list: t.Union[t.List[bytes], t.List[zmq.Message]], copy: bool = True
-    ) -> t.Tuple[t.List[bytes], t.Union[t.List[bytes], t.List[zmq.Message]]]:
+        self, msg_list: list[bytes] | list[zmq.Message], copy: bool = True
+    ) -> tuple[list[bytes], list[bytes] | list[zmq.Message]]:
         """Split the identities from the rest of the message.

         Feed until DELIM is reached, then return the prefix as idents and
@@ -1017,10 +1019,10 @@ def _cull_digest_history(self) -> None:

     def deserialize(
         self,
-        msg_list: t.Union[t.List[bytes], t.List[zmq.Message]],
+        msg_list: list[bytes] | list[zmq.Message],
         content: bool = True,
         copy: bool = True,
-    ) -> t.Dict[str, t.Any]:
+    ) -> dict[str, t.Any]:
         """Unserialize a msg_list to a nested message dict.

         This is roughly the inverse of serialize. The serialize/deserialize
@@ -1092,7 +1094,7 @@ def deserialize(
         # adapt to the current version
         return adapt(message)

-    def unserialize(self, *args: t.Any, **kwargs: t.Any) -> t.Dict[str, t.Any]:
+    def unserialize(self, *args: t.Any, **kwargs: t.Any) -> dict[str, t.Any]:
         """**DEPRECATED** Use deserialize instead."""  # pragma: no cover
         warnings.warn(
diff --git a/jupyter_client/threaded.py b/jupyter_client/threaded.py
index d4cec533..0479b85e 100644
--- a/jupyter_client/threaded.py
+++ b/jupyter_client/threaded.py
@@ -299,7 +299,8 @@ class ThreadedKernelClient(KernelClient):

     @property
     def ioloop(self):
-        return self.ioloop_thread.ioloop
+        if self.ioloop_thread:
+            return self.ioloop_thread.ioloop

     ioloop_thread = Instance(IOLoopThread, allow_none=True)

@@ -329,7 +330,7 @@ def _check_kernel_info_reply(self, msg: Dict[str, Any]) -> None:
     def stop_channels(self) -> None:
         """Stop the channels on the client."""
         super().stop_channels()
-        if self.ioloop_thread.is_alive():
+        if self.ioloop_thread and self.ioloop_thread.is_alive():
             self.ioloop_thread.stop()

     iopub_channel_class = Type(ThreadedZMQSocketChannel)
diff --git a/pyproject.toml b/pyproject.toml
index ce6f70af..67561c2d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -103,7 +103,7 @@ nowarn = "test -W default {args}"

 [tool.hatch.envs.typing]
 features = ["test"]
-dependencies = ["mypy>=1.5.1"]
+dependencies = ["mypy>=1.5.1", "traitlets>=5.10.1"]

 [tool.hatch.envs.typing.scripts]
 test = "mypy --install-types --non-interactive {args:.}"
diff --git a/tests/problemkernel.py b/tests/problemkernel.py
index a55648f5..a20cf708 100644
--- a/tests/problemkernel.py
+++ b/tests/problemkernel.py
@@ -18,7 +18,7 @@ class ProblemTestKernel(Kernel):


 class ProblemTestApp(IPKernelApp):
-    kernel_class = ProblemTestKernel
+    kernel_class = ProblemTestKernel  # type:ignore[assignment]

     def init_io(self):
         # Overridden to disable stdout/stderr capture
diff --git a/tests/signalkernel.py b/tests/signalkernel.py
index 887d4d32..65fdb687 100644
--- a/tests/signalkernel.py
+++ b/tests/signalkernel.py
@@ -62,7 +62,7 @@ def do_execute(


 class SignalTestApp(IPKernelApp):
-    kernel_class = SignalTestKernel
+    kernel_class = SignalTestKernel  # type:ignore[assignment]

     def init_io(self):
         # Overridden to disable stdout/stderr capture
diff --git a/tests/test_kernelspec.py b/tests/test_kernelspec.py
index 115d9a29..480d13ae 100644
--- a/tests/test_kernelspec.py
+++ b/tests/test_kernelspec.py
@@ -41,13 +41,13 @@ def test_find_kernel_specs(self):

     def test_allowed_kernel_names(self):
         ksm = kernelspec.KernelSpecManager()
-        ksm.allowed_kernelspecs = ["foo"]
+        ksm.allowed_kernelspecs = {"foo"}
         kernels = ksm.find_kernel_specs()
         assert not len(kernels)

     def test_deprecated_whitelist(self):
         ksm = kernelspec.KernelSpecManager()
-        ksm.whitelist = ["bar"]
+        ksm.whitelist = {"bar"}
         kernels = ksm.find_kernel_specs()
         assert not len(kernels)
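
Note on the docs/conf.py change at the top of this patch: the FilterForIssue123 class silences one specific sphinx-autodoc-typehints warning by attaching a logging filter to that extension's logger. Below is a minimal standalone sketch of the same pattern using only the standard-library logging module (the Sphinx-specific logger adapter is left out); the logger name and message prefix are taken from the patch, while the demo calls at the end are illustrative and not part of jupyter_client.

import logging

# Logger name used by the sphinx-autodoc-typehints extension (from the patch above).
NOISY_LOGGER = "sphinx_autodoc_typehints"


class FilterForIssue123(logging.Filter):
    """Drop log records whose message starts with a known, harmless warning prefix."""

    def filter(self, record: logging.LogRecord) -> bool:
        # Returning False tells the logging framework to discard the record.
        return not record.getMessage().startswith("Cannot handle as a local function")


logging.basicConfig(level=logging.WARNING)
logging.getLogger(NOISY_LOGGER).addFilter(FilterForIssue123())

# The first warning is suppressed by the filter; the second is still emitted.
logging.getLogger(NOISY_LOGGER).warning("Cannot handle as a local function: demo")
logging.getLogger(NOISY_LOGGER).warning("Unrelated warnings still come through")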