diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 784cd57..bbec63e 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -4,6 +4,7 @@ on:
   push:
     branches: ["main"]
   pull_request:
+  workflow_dispatch:
   schedule:
     - cron: "0 8 * * *"
 
@@ -18,8 +19,8 @@ jobs:
     steps:
       - uses: actions/checkout@v4
       - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1
-      - name: Run Linters
-        run: |
+      - name: Run Linters
+        run: |
           hatch run typing:test
           hatch run lint:style
           pipx run interrogate -v .
@@ -36,11 +37,12 @@ jobs:
   build:
     name: Build, test and code coverage
     runs-on: ${{ matrix.os }}
+    timeout-minutes: 30
     strategy:
       fail-fast: false
       matrix:
         os: [ubuntu-latest, macos-latest, windows-latest]
-        python-version: [ '3.8', '3.9', '3.10', "3.11" ]
+        python-version: ["3.8", "3.9", "3.10", "3.11"]
         exclude:
           - os: windows-latest
             python-version: 3.8
@@ -113,9 +115,9 @@ jobs:
   check_links:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
-      - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1
-      - uses: jupyterlab/maintainer-tools/.github/actions/check-links@v1
+      - uses: actions/checkout@v4
+      - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1
+      - uses: jupyterlab/maintainer-tools/.github/actions/check-links@v1
 
   check_release:
     runs-on: ubuntu-latest
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index a397d5e..aad4bc2 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,5 +1,6 @@
 ci:
   autoupdate_schedule: monthly
+  autoupdate_commit_msg: "chore: update pre-commit hooks"
 
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
@@ -15,6 +16,7 @@ repos:
       - id: check-json
      - id: check-toml
       - id: check-yaml
+      - id: debug-statements
       - id: end-of-file-fixer
       - id: trailing-whitespace
 
@@ -30,13 +32,44 @@ repos:
         additional_dependencies: [mdformat-gfm, mdformat-frontmatter, mdformat-footnote]
 
-  - repo: https://github.com/psf/black
-    rev: 23.9.1
+  - repo: https://github.com/pre-commit/mirrors-prettier
+    rev: "v3.0.2"
+    hooks:
+      - id: prettier
+        types_or: [yaml, html, json]
+
+  - repo: https://github.com/adamchainz/blacken-docs
+    rev: "1.16.0"
+    hooks:
+      - id: blacken-docs
+        additional_dependencies: [black==23.7.0]
+
+  - repo: https://github.com/psf/black-pre-commit-mirror
+    rev: 23.7.0
     hooks:
       - id: black
 
+  - repo: https://github.com/codespell-project/codespell
+    rev: "v2.2.5"
+    hooks:
+      - id: codespell
+        args: ["-L", "sur,nd"]
+
+  - repo: https://github.com/pre-commit/pygrep-hooks
+    rev: "v1.10.0"
+    hooks:
+      - id: rst-backticks
+      - id: rst-directive-colons
+      - id: rst-inline-touching-normal
+
   - repo: https://github.com/astral-sh/ruff-pre-commit
     rev: v0.0.292
     hooks:
       - id: ruff
-        args: ["--fix"]
+        args: ["--fix", "--show-fixes"]
+
+  - repo: https://github.com/scientific-python/cookie
+    rev: "2023.08.23"
+    hooks:
+      - id: sp-repo-review
+        additional_dependencies: ["repo-review[cli]"]
diff --git a/.readthedocs.yml b/.readthedocs.yml
index e63c9aa..3c4b489 100644
--- a/.readthedocs.yml
+++ b/.readthedocs.yml
@@ -20,4 +20,4 @@ python:
 build:
   os: ubuntu-22.04
   tools:
-    python: "3.11"
+    python: "3.11"
diff --git a/docs/client.rst b/docs/client.rst
index 9dd581f..ad2f338 100644
--- a/docs/client.rst
+++ b/docs/client.rst
@@ -138,7 +138,7 @@ there are no execution errors. But, what if there are errors?
 Execution until first error
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
 An error during the notebook execution, by default, will stop the execution
-and raise a `CellExecutionError`. Conveniently, the source cell causing
+and raise a ``CellExecutionError``. Conveniently, the source cell causing
 the error and the original error name and message are also printed.
 After an error, we can still save the notebook as before::
diff --git a/nbclient/__init__.py b/nbclient/__init__.py
index 0afe16a..30c13d7 100644
--- a/nbclient/__init__.py
+++ b/nbclient/__init__.py
@@ -1,15 +1,4 @@
-import sys
+from ._version import __version__, version_info
+from .client import NotebookClient, execute
 
-from ._version import __version__, version_info  # noqa # noqa
-from .client import NotebookClient, execute  # noqa: F401
-
-
-def _cleanup() -> None:
-    pass
-
-
-# patch subprocess on Windows for python<3.7
-# see https://bugs.python.org/issue37380
-# the fix for python3.7: https://github.com/python/cpython/pull/15706/files
-if sys.platform == 'win32':
-    pass
+__all__ = ["__version__", "version_info", "NotebookClient", "execute"]
diff --git a/nbclient/cli.py b/nbclient/cli.py
index cba7154..1007b87 100644
--- a/nbclient/cli.py
+++ b/nbclient/cli.py
@@ -1,7 +1,10 @@
 """nbclient cli."""
+from __future__ import annotations
+
 import logging
 import pathlib
 import sys
+import typing
 from textwrap import dedent
 
 import nbformat
@@ -13,13 +16,15 @@
 
 from .client import NotebookClient
 
-nbclient_aliases: dict = {
+# mypy: disable-error-code="no-untyped-call"
+
+nbclient_aliases: dict[str, str] = {
     'timeout': 'NbClientApp.timeout',
     'startup_timeout': 'NbClientApp.startup_timeout',
     'kernel_name': 'NbClientApp.kernel_name',
 }
 
-nbclient_flags: dict = {
+nbclient_flags: dict[str, typing.Any] = {
     'allow-errors': (
         {
             'NbClientApp': {
@@ -43,7 +48,7 @@ class NbClientApp(JupyterApp):
     description = "An application used to execute notebook files (*.ipynb)"
     notebooks = List([], help="Path of notebooks to convert").tag(config=True)
-    timeout: int = Integer(
+    timeout = Integer(
         None,
         allow_none=True,
         help=dedent(
             """
@@ -54,7 +59,7 @@ class NbClientApp(JupyterApp):
             """
         ),
     ).tag(config=True)
-    startup_timeout: int = Integer(
+    startup_timeout = Integer(
         60,
         help=dedent(
             """
@@ -64,7 +69,7 @@ class NbClientApp(JupyterApp):
             """
         ),
     ).tag(config=True)
-    allow_errors: bool = Bool(
+    allow_errors = Bool(
         False,
         help=dedent(
             """
@@ -76,7 +81,7 @@ class NbClientApp(JupyterApp):
             """
         ),
     ).tag(config=True)
-    skip_cells_with_tag: str = Unicode(
+    skip_cells_with_tag = Unicode(
         'skip-execution',
         help=dedent(
             """
@@ -84,7 +89,7 @@ class NbClientApp(JupyterApp):
             """
         ),
     ).tag(config=True)
-    kernel_name: str = Unicode(
+    kernel_name = Unicode(
         '',
         help=dedent(
             """
@@ -95,11 +100,11 @@ class NbClientApp(JupyterApp):
     ).tag(config=True)
 
     @default('log_level')
-    def _log_level_default(self):
+    def _log_level_default(self) -> int:
         return logging.INFO
 
     @catch_config_error
-    def initialize(self, argv=None):
+    def initialize(self, argv: list[str] | None = None) -> None:
         """Initialize the app."""
         super().initialize(argv)
 
@@ -111,9 +116,10 @@ def initialize(self, argv=None):
             sys.exit(-1)
 
         # Loop and run them one by one
-        [self.run_notebook(path) for path in self.notebooks]
+        for path in self.notebooks:
+            self.run_notebook(path)
 
-    def get_notebooks(self):
+    def get_notebooks(self) -> list[str]:
         """Get the notebooks for the app."""
         # If notebooks were provided from the command line, use those
         if self.extra_args:
@@ -125,7 +131,7 @@
         # Return what we got.
         return notebooks
 
-    def run_notebook(self, notebook_path):
+    def run_notebook(self, notebook_path: str) -> None:
         """Run a notebook by path."""
         # Log it
         self.log.info(f"Executing {notebook_path}")
diff --git a/nbclient/client.py b/nbclient/client.py
index 0fc14f6..5450cf3 100644
--- a/nbclient/client.py
+++ b/nbclient/client.py
@@ -1,4 +1,6 @@
 """nbclient implementation."""
+from __future__ import annotations
+
 import asyncio
 import atexit
 import base64
@@ -12,8 +14,8 @@
 from textwrap import dedent
 from time import monotonic
 
-from jupyter_client import KernelManager
 from jupyter_client.client import KernelClient
+from jupyter_client.manager import KernelManager
 from nbformat import NotebookNode
 from nbformat.v4 import output_from_msg
 from traitlets import Any, Bool, Callable, Dict, Enum, Integer, List, Type, Unicode, default
@@ -32,8 +34,10 @@
 _RGX_CARRIAGERETURN = re.compile(r".*\r(?=[^\n])")
 _RGX_BACKSPACE = re.compile(r"[^\n]\b")
 
+# mypy: disable-error-code="no-untyped-call"
+
 
-def timestamp(msg: t.Optional[t.Dict] = None) -> str:
+def timestamp(msg: dict[str, t.Any] | None = None) -> str:
     """Get the timestamp for a message."""
     if msg and 'header' in msg:  # The test mocks don't provide a header, so tolerate that
         msg_header = msg['header']
@@ -58,7 +62,7 @@ class NotebookClient(LoggingConfigurable):
     Encompasses a Client for executing cells in a notebook
     """
 
-    timeout: int = Integer(
+    timeout = Integer(
         None,
         allow_none=True,
         help=dedent(
@@ -72,7 +76,7 @@ class NotebookClient(LoggingConfigurable):
         ),
     ).tag(config=True)
 
-    timeout_func: t.Callable[..., t.Optional[int]] = Any(
+    timeout_func: t.Callable[..., int | None] | None = Any(  # type:ignore[assignment]
         default_value=None,
         allow_none=True,
         help=dedent(
@@ -90,7 +94,7 @@ class NotebookClient(LoggingConfigurable):
         ),
     ).tag(config=True)
 
-    interrupt_on_timeout: bool = Bool(
+    interrupt_on_timeout = Bool(
         False,
         help=dedent(
             """
@@ -101,7 +105,7 @@ class NotebookClient(LoggingConfigurable):
         ),
     ).tag(config=True)
 
-    error_on_timeout: t.Optional[t.Dict] = Dict(
+    error_on_timeout = Dict(
         default_value=None,
         allow_none=True,
         help=dedent(
@@ -120,7 +124,7 @@ class NotebookClient(LoggingConfigurable):
         ),
     ).tag(config=True)
 
-    startup_timeout: int = Integer(
+    startup_timeout = Integer(
         60,
         help=dedent(
             """
@@ -131,7 +135,7 @@ class NotebookClient(LoggingConfigurable):
         ),
     ).tag(config=True)
 
-    allow_errors: bool = Bool(
+    allow_errors = Bool(
         False,
         help=dedent(
             """
@@ -146,7 +150,7 @@ class NotebookClient(LoggingConfigurable):
         ),
     ).tag(config=True)
 
-    allow_error_names: t.List[str] = List(
+    allow_error_names = List(
         Unicode(),
         help=dedent(
             """
@@ -157,7 +161,7 @@ class NotebookClient(LoggingConfigurable):
         ),
     ).tag(config=True)
 
-    force_raise_errors: bool = Bool(
+    force_raise_errors = Bool(
         False,
         help=dedent(
             """
@@ -175,7 +179,7 @@ class NotebookClient(LoggingConfigurable):
         ),
     ).tag(config=True)
 
-    skip_cells_with_tag: str = Unicode(
+    skip_cells_with_tag = Unicode(
         'skip-execution',
         help=dedent(
             """
@@ -184,9 +188,9 @@ class NotebookClient(LoggingConfigurable):
         ),
     ).tag(config=True)
 
-    extra_arguments: t.List = List(Unicode()).tag(config=True)
+    extra_arguments = List(Unicode()).tag(config=True)
 
-    kernel_name: str = Unicode(
+    kernel_name = Unicode(
         '',
         help=dedent(
             """
@@ -196,7 +200,7 @@ class NotebookClient(LoggingConfigurable):
         ),
     ).tag(config=True)
 
-    raise_on_iopub_timeout: bool = Bool(
+    raise_on_iopub_timeout = Bool(
         False,
         help=dedent(
             """
@@ -211,7 +215,7 @@ class NotebookClient(LoggingConfigurable):
         ),
     ).tag(config=True)
 
-    store_widget_state: bool = Bool(
+    store_widget_state = Bool(
         True,
         help=dedent(
             """
@@ -221,7 +225,7 @@ class NotebookClient(LoggingConfigurable):
         ),
     ).tag(config=True)
 
-    record_timing: bool = Bool(
+    record_timing = Bool(
         True,
         help=dedent(
             """
@@ -231,7 +235,7 @@ class NotebookClient(LoggingConfigurable):
         ),
     ).tag(config=True)
 
-    iopub_timeout: int = Integer(
+    iopub_timeout = Integer(
         4,
         allow_none=False,
         help=dedent(
@@ -244,7 +248,7 @@ class NotebookClient(LoggingConfigurable):
         ),
     ).tag(config=True)
 
-    shell_timeout_interval: int = Integer(
+    shell_timeout_interval = Integer(
         5,
         allow_none=False,
         help=dedent(
@@ -269,7 +273,7 @@ class NotebookClient(LoggingConfigurable):
         ),
     ).tag(config=True)
 
-    ipython_hist_file: str = Unicode(
+    ipython_hist_file = Unicode(
         default_value=':memory:',
         help="""Path to file to use for SQLite history database for an IPython kernel.
 
@@ -287,7 +291,7 @@ class NotebookClient(LoggingConfigurable):
         config=True, klass=KernelManager, help='The kernel manager class to use.'
     )
 
-    on_notebook_start: t.Optional[t.Callable] = Callable(
+    on_notebook_start = Callable(
         default_value=None,
         allow_none=True,
         help=dedent(
@@ -299,7 +303,7 @@ class NotebookClient(LoggingConfigurable):
         ),
     ).tag(config=True)
 
-    on_notebook_complete: t.Optional[t.Callable] = Callable(
+    on_notebook_complete = Callable(
         default_value=None,
         allow_none=True,
         help=dedent(
@@ -310,7 +314,7 @@ class NotebookClient(LoggingConfigurable):
         ),
     ).tag(config=True)
 
-    on_notebook_error: t.Optional[t.Callable] = Callable(
+    on_notebook_error = Callable(
         default_value=None,
         allow_none=True,
         help=dedent(
@@ -321,7 +325,7 @@ class NotebookClient(LoggingConfigurable):
         ),
     ).tag(config=True)
 
-    on_cell_start: t.Optional[t.Callable] = Callable(
+    on_cell_start = Callable(
         default_value=None,
         allow_none=True,
         help=dedent(
@@ -333,7 +337,7 @@ class NotebookClient(LoggingConfigurable):
         ),
     ).tag(config=True)
 
-    on_cell_execute: t.Optional[t.Callable] = Callable(
+    on_cell_execute = Callable(
         default_value=None,
         allow_none=True,
         help=dedent(
@@ -344,7 +348,7 @@ class NotebookClient(LoggingConfigurable):
         ),
     ).tag(config=True)
 
-    on_cell_complete: t.Optional[t.Callable] = Callable(
+    on_cell_complete = Callable(
         default_value=None,
         allow_none=True,
         help=dedent(
@@ -356,7 +360,7 @@ class NotebookClient(LoggingConfigurable):
         ),
     ).tag(config=True)
 
-    on_cell_executed: t.Optional[t.Callable] = Callable(
+    on_cell_executed = Callable(
         default_value=None,
         allow_none=True,
         help=dedent(
@@ -368,7 +372,7 @@ class NotebookClient(LoggingConfigurable):
         ),
     ).tag(config=True)
 
-    on_cell_error: t.Optional[t.Callable] = Callable(
+    on_cell_error = Callable(
         default_value=None,
         allow_none=True,
         help=dedent(
@@ -380,14 +384,14 @@ class NotebookClient(LoggingConfigurable):
         ),
     ).tag(config=True)
 
-    @default('kernel_manager_class')  # type:ignore[misc]
-    def _kernel_manager_class_default(self) -> t.Type[KernelManager]:
+    @default('kernel_manager_class')
+    def _kernel_manager_class_default(self) -> type[KernelManager]:
         """Use a dynamic default to avoid importing jupyter_client at startup"""
-        from jupyter_client import AsyncKernelManager
+        from jupyter_client import AsyncKernelManager  # type:ignore[attr-defined]
 
         return AsyncKernelManager
 
-    _display_id_map = Dict(
+    _display_id_map: dict[str, t.Any] = Dict(  # type:ignore[assignment]
         help=dedent(
             """
             mapping of locations of outputs with a given display_id
@@ -402,7 +406,7 @@ def _kernel_manager_class_default(self) -> t.Type[KernelManager]:
         )
     )
 
-    display_data_priority: t.List = List(
+    display_data_priority = List(
         [
             'text/html',
             'application/pdf',
@@ -420,7 +424,7 @@ def _kernel_manager_class_default(self) -> t.Type[KernelManager]:
         """,
     ).tag(config=True)
 
-    resources = Dict(
+    resources: dict[str, t.Any] = Dict(  # type:ignore[assignment]
         help=dedent(
             """
             Additional resources used in the conversion process. For example,
@@ -438,7 +442,7 @@ def _kernel_manager_class_default(self) -> t.Type[KernelManager]:
         )
     )
 
-    def __init__(self, nb: NotebookNode, km: t.Optional[KernelManager] = None, **kw: t.Any) -> None:
+    def __init__(self, nb: NotebookNode, km: KernelManager | None = None, **kw: t.Any) -> None:
         """Initializes the execution manager.
 
         Parameters
@@ -451,30 +455,30 @@ def __init__(self, nb: NotebookNode, km: t.Optional[KernelManager] = None, **kw:
         """
         super().__init__(**kw)
         self.nb: NotebookNode = nb
-        self.km: t.Optional[KernelManager] = km
+        self.km: KernelManager | None = km
         self.owns_km: bool = km is None  # whether the NotebookClient owns the kernel manager
-        self.kc: t.Optional[KernelClient] = None
+        self.kc: KernelClient | None = None
         self.reset_execution_trackers()
-        self.widget_registry: t.Dict[str, t.Dict] = {
+        self.widget_registry: dict[str, dict[str, t.Any]] = {
             '@jupyter-widgets/output': {'OutputModel': OutputWidget}
         }
         # comm_open_handlers should return an object with a .handle_msg(msg) method or None
-        self.comm_open_handlers: t.Dict[str, t.Any] = {
+        self.comm_open_handlers: dict[str, t.Any] = {
             'jupyter.widget': self.on_comm_open_jupyter_widget
         }
 
     def reset_execution_trackers(self) -> None:
         """Resets any per-execution trackers."""
-        self.task_poll_for_reply: t.Optional[asyncio.Future] = None
+        self.task_poll_for_reply: asyncio.Future[t.Any] | None = None
         self.code_cells_executed = 0
         self._display_id_map = {}
-        self.widget_state: t.Dict[str, t.Dict] = {}
-        self.widget_buffers: t.Dict[str, t.Dict[t.Tuple[str, ...], t.Dict[str, str]]] = {}
+        self.widget_state: dict[str, dict[str, t.Any]] = {}
+        self.widget_buffers: dict[str, dict[tuple[str, ...], dict[str, str]]] = {}
         # maps to list of hooks, where the last is used, this is used
         # to support nested use of output widgets.
         self.output_hook_stack: t.Any = collections.defaultdict(list)
         # our front-end mimicking Output widgets
-        self.comm_objects: t.Dict[str, t.Any] = {}
+        self.comm_objects: dict[str, t.Any] = {}
 
     def create_kernel_manager(self) -> KernelManager:
         """Creates a new kernel manager.
@@ -490,9 +494,11 @@ def create_kernel_manager(self) -> KernelManager:
                 self.kernel_name = kn
 
         if not self.kernel_name:
-            self.km = self.kernel_manager_class(config=self.config)
+            self.km = self.kernel_manager_class(config=self.config)  # type:ignore[operator]
         else:
-            self.km = self.kernel_manager_class(kernel_name=self.kernel_name, config=self.config)
+            self.km = self.kernel_manager_class(
+                kernel_name=self.kernel_name, config=self.config
+            )  # type:ignore[operator]
         assert self.km is not None
         return self.km
 
@@ -575,7 +581,7 @@ async def async_start_new_kernel_client(self) -> KernelClient:
     start_new_kernel_client = run_sync(async_start_new_kernel_client)
 
     @contextmanager
-    def setup_kernel(self, **kwargs: t.Any) -> t.Generator:
+    def setup_kernel(self, **kwargs: t.Any) -> t.Generator[None, None, None]:
         """
         Context manager for setting up the kernel to execute a notebook.
 
@@ -605,7 +611,7 @@ def setup_kernel(self, **kwargs: t.Any) -> t.Generator:
             self._cleanup_kernel()
 
     @asynccontextmanager
-    async def async_setup_kernel(self, **kwargs: t.Any) -> t.AsyncGenerator:
+    async def async_setup_kernel(self, **kwargs: t.Any) -> t.AsyncGenerator[None, None]:
         """
         Context manager for setting up the kernel to execute a notebook.
 
@@ -626,7 +632,7 @@ async def async_setup_kernel(self, **kwargs: t.Any) -> t.AsyncGenerator:
         # This is necessary as the ioloop has stopped once atexit fires.
         atexit.register(self._cleanup_kernel)
 
-        def on_signal():
+        def on_signal() -> None:
            """Handle signals."""
            self._async_cleanup_kernel_future = asyncio.ensure_future(self._async_cleanup_kernel())
            atexit.unregister(self._cleanup_kernel)
@@ -732,7 +738,7 @@ def set_widgets_metadata(self) -> None:
             if buffers:
                 widget['buffers'] = list(buffers.values())
 
-    def _update_display_id(self, display_id: str, msg: t.Dict) -> None:
+    def _update_display_id(self, display_id: str, msg: dict[str, t.Any]) -> None:
         """Update outputs with a given display_id"""
         if display_id not in self._display_id_map:
             self.log.debug("display id %r not in %s", display_id, self._display_id_map)
@@ -758,13 +764,13 @@ async def _async_poll_for_reply(
         self,
         msg_id: str,
         cell: NotebookNode,
-        timeout: t.Optional[int],
-        task_poll_output_msg: asyncio.Future,
-        task_poll_kernel_alive: asyncio.Future,
-    ) -> t.Dict:
-        msg: t.Dict
+        timeout: int | None,
+        task_poll_output_msg: asyncio.Future[t.Any],
+        task_poll_kernel_alive: asyncio.Future[t.Any],
+    ) -> dict[str, t.Any]:
+        msg: dict[str, t.Any]
         assert self.kc is not None
-        new_timeout: t.Optional[float] = None
+        new_timeout: float | None = None
         if timeout is not None:
             deadline = monotonic() + timeout
             new_timeout = float(timeout)
@@ -772,7 +778,7 @@ async def _async_poll_for_reply(
         while True:
             try:
                 if error_on_timeout_execute_reply:
-                    msg = error_on_timeout_execute_reply
+                    msg = error_on_timeout_execute_reply  # type:ignore[unreachable]
                     msg['parent_header'] = {'msg_id': msg_id}
                 else:
                     msg = await ensure_async(self.kc.shell_channel.get_msg(timeout=new_timeout))
@@ -824,7 +830,7 @@ async def _async_poll_kernel_alive(self) -> None:
                 self.task_poll_for_reply.cancel()
                 return
 
-    def _get_timeout(self, cell: t.Optional[NotebookNode]) -> t.Optional[int]:
+    def _get_timeout(self, cell: NotebookNode | None) -> int | None:
         if self.timeout_func is not None and cell is not None:
             timeout = self.timeout_func(cell)
         else:
@@ -836,8 +842,8 @@ def _get_timeout(self, cell: t.Optional[NotebookNode]) -> t.Optional[int]:
         return timeout
 
     async def _async_handle_timeout(
-        self, timeout: int, cell: t.Optional[NotebookNode] = None
-    ) -> t.Union[None, t.Dict]:
+        self, timeout: int, cell: NotebookNode | None = None
+    ) -> None | dict[str, t.Any]:
         self.log.error("Timeout waiting for execute reply (%is)." % timeout)
         if self.interrupt_on_timeout:
             self.log.error("Interrupting kernel")
@@ -860,8 +866,8 @@ async def _async_check_alive(self) -> None:
             raise DeadKernelError("Kernel died")
 
     async def async_wait_for_reply(
-        self, msg_id: str, cell: t.Optional[NotebookNode] = None
-    ) -> t.Optional[t.Dict]:
+        self, msg_id: str, cell: NotebookNode | None = None
+    ) -> dict[str, t.Any] | None:
         """Wait for a message reply."""
         assert self.kc is not None
         # wait for finish, with timeout
@@ -869,7 +875,7 @@ async def async_wait_for_reply(
         cummulative_time = 0
         while True:
             try:
-                msg: t.Dict = await ensure_async(
+                msg: dict[str, t.Any] = await ensure_async(
                     self.kc.shell_channel.get_msg(timeout=self.shell_timeout_interval)
                 )
             except Empty:
@@ -887,13 +893,13 @@ async def async_wait_for_reply(
     # Backwards compatibility naming for papermill
     _wait_for_reply = wait_for_reply
 
-    def _passed_deadline(self, deadline: int) -> bool:
+    def _passed_deadline(self, deadline: int | None) -> bool:
         if deadline is not None and deadline - monotonic() <= 0:
             return True
         return False
 
     async def _check_raise_for_error(
-        self, cell: NotebookNode, cell_index: int, exec_reply: t.Optional[t.Dict]
+        self, cell: NotebookNode, cell_index: int, exec_reply: dict[str, t.Any] | None
     ) -> None:
         if exec_reply is None:
             return None
@@ -917,7 +923,7 @@ async def async_execute_cell(
         self,
         cell: NotebookNode,
         cell_index: int,
-        execution_count: t.Optional[int] = None,
+        execution_count: int | None = None,
         store_history: bool = True,
     ) -> NotebookNode:
         """
@@ -1063,8 +1069,8 @@ async def async_execute_cell(
     execute_cell = run_sync(async_execute_cell)
 
     def process_message(
-        self, msg: t.Dict, cell: NotebookNode, cell_index: int
-    ) -> t.Optional[NotebookNode]:
+        self, msg: dict[str, t.Any], cell: NotebookNode, cell_index: int
+    ) -> NotebookNode | None:
         """
         Processes a kernel message, updates cell state, and returns the
         resulting output object that was appended to cell.outputs.
@@ -1127,12 +1133,12 @@ def process_message(
         return None
 
     def output(
-        self, outs: t.List, msg: t.Dict, display_id: str, cell_index: int
-    ) -> t.Optional[NotebookNode]:
+        self, outs: list[NotebookNode], msg: dict[str, t.Any], display_id: str, cell_index: int
+    ) -> NotebookNode | None:
         """Handle output."""
         msg_type = msg['msg_type']
 
-        out: t.Optional[NotebookNode] = None
+        out: NotebookNode | None = None
         parent_msg_id = msg['parent_header'].get('msg_id')
         if self.output_hook_stack[parent_msg_id]:
@@ -1161,11 +1167,14 @@ def output(
             output_idx_list = cell_map.setdefault(cell_index, [])
             output_idx_list.append(len(outs))
 
-        outs.append(out)
+        if out:
+            outs.append(out)
 
         return out
 
-    def clear_output(self, outs: t.List, msg: t.Dict, cell_index: int) -> None:
+    def clear_output(
+        self, outs: list[NotebookNode], msg: dict[str, t.Any], cell_index: int
+    ) -> None:
         """Clear output."""
         content = msg['content']
 
@@ -1191,7 +1200,9 @@ def clear_display_id_mapping(self, cell_index: int) -> None:
             if cell_index in cell_map:
                 cell_map[cell_index] = []
 
-    def handle_comm_msg(self, outs: t.List, msg: t.Dict, cell_index: int) -> None:
+    def handle_comm_msg(
+        self, outs: list[NotebookNode], msg: dict[str, t.Any], cell_index: int
+    ) -> None:
         """Handle a comm message."""
         content = msg['content']
         data = content['data']
@@ -1202,7 +1213,7 @@ def handle_comm_msg(self, outs: t.List, msg: t.Dict, cell_index: int) -> None:
                 if comm_id not in self.widget_buffers:
                     self.widget_buffers[comm_id] = {}
                 # for each comm, the path uniquely identifies a buffer
-                new_buffers: t.Dict[t.Tuple[str, ...], t.Dict[str, str]] = {
+                new_buffers: dict[tuple[str, ...], dict[str, str]] = {
                     tuple(k["path"]): k for k in self._get_buffer_data(msg)
                 }
                 self.widget_buffers[comm_id].update(new_buffers)
@@ -1224,7 +1235,7 @@ def handle_comm_msg(self, outs: t.List, msg: t.Dict, cell_index: int) -> None:
         if comm_id in self.comm_objects:
             self.comm_objects[comm_id].handle_msg(msg)
 
-    def _serialize_widget_state(self, state: t.Dict) -> t.Dict[str, t.Any]:
+    def _serialize_widget_state(self, state: dict[str, t.Any]) -> dict[str, t.Any]:
         """Serialize a widget state, following format in @jupyter-widgets/schema."""
         return {
             'model_name': state.get('_model_name'),
@@ -1233,7 +1244,7 @@ def handle_comm_msg(self, outs: t.List, msg: t.Dict, cell_index: int) -> None:
             'state': state,
         }
 
-    def _get_buffer_data(self, msg: t.Dict) -> t.List[t.Dict[str, str]]:
+    def _get_buffer_data(self, msg: dict[str, t.Any]) -> list[dict[str, str]]:
         encoded_buffers = []
         paths = msg['content']['data']['buffer_paths']
         buffers = msg['buffers']
@@ -1263,7 +1274,7 @@ def remove_output_hook(self, msg_id: str, hook: OutputWidget) -> None:
         removed_hook = self.output_hook_stack[msg_id].pop()
         assert removed_hook == hook
 
-    def on_comm_open_jupyter_widget(self, msg: t.Dict) -> t.Optional[t.Any]:
+    def on_comm_open_jupyter_widget(self, msg: dict[str, t.Any]) -> t.Any | None:
         """Handle a jupyter widget comm open."""
         content = msg['content']
         data = content['data']
@@ -1279,8 +1290,8 @@ def on_comm_open_jupyter_widget(self, msg: t.Dict) -> t.Optional[t.Any]:
 
 def execute(
     nb: NotebookNode,
-    cwd: t.Optional[str] = None,
-    km: t.Optional[KernelManager] = None,
+    cwd: str | None = None,
+    km: KernelManager | None = None,
     **kwargs: t.Any,
 ) -> NotebookNode:
     """Execute a notebook's code, updating outputs within the notebook object.
diff --git a/nbclient/exceptions.py b/nbclient/exceptions.py
index cd6a739..9f917dd 100644
--- a/nbclient/exceptions.py
+++ b/nbclient/exceptions.py
@@ -1,5 +1,7 @@
 """Exceptions for nbclient."""
-from typing import Dict, List
+from __future__ import annotations
+
+from typing import Any
 
 from nbformat import NotebookNode
 
@@ -22,7 +24,7 @@ class CellTimeoutError(TimeoutError, CellControlSignal):
     @classmethod
     def error_from_timeout_and_cell(
         cls, msg: str, timeout: int, cell: NotebookNode
-    ) -> "CellTimeoutError":
+    ) -> CellTimeoutError:
         """Create an error from a timeout on a cell."""
         if cell and cell.source:
             src_by_lines = cell.source.strip().split("\n")
@@ -68,9 +70,9 @@ def __init__(self, traceback: str, ename: str, evalue: str) -> None:
         self.ename = ename
         self.evalue = evalue
 
-    def __reduce__(self) -> tuple:
+    def __reduce__(self) -> tuple[Any]:
         """Reduce implementation."""
-        return type(self), (self.traceback, self.ename, self.evalue)
+        return type(self), (self.traceback, self.ename, self.evalue)  # type:ignore[return-value]
 
     def __str__(self) -> str:
         """Str repr."""
@@ -80,13 +82,13 @@ def __str__(self) -> str:
         return f"{self.ename}: {self.evalue}"
 
     @classmethod
-    def from_cell_and_msg(cls, cell: NotebookNode, msg: Dict) -> "CellExecutionError":
+    def from_cell_and_msg(cls, cell: NotebookNode, msg: dict[str, Any]) -> CellExecutionError:
         """Instantiate from a code cell object and a message contents
 
         (message is either execute_reply or error)
         """
         # collect stream outputs for our error message
-        stream_outputs: List[str] = []
+        stream_outputs: list[str] = []
         for output in cell.outputs:
             if output["output_type"] == "stream":
                 stream_outputs.append(
diff --git a/nbclient/jsonutil.py b/nbclient/jsonutil.py
index e67ffd5..d4bdcbb 100644
--- a/nbclient/jsonutil.py
+++ b/nbclient/jsonutil.py
@@ -4,6 +4,7 @@
 # Copyright (c) IPython Development Team.
 # Distributed under the terms of the Modified BSD License.
 
+from __future__ import annotations
 
 import math
 import numbers
@@ -11,7 +12,7 @@
 import types
 from binascii import b2a_base64
 from datetime import datetime
-from typing import Dict
+from typing import Any
 
 # -----------------------------------------------------------------------------
 # Globals and constants
@@ -46,7 +47,7 @@
 PDF64 = b'JVBER'
 
 
-def encode_images(format_dict: Dict) -> Dict[str, str]:
+def encode_images(format_dict: dict[str, str]) -> dict[str, str]:
     """b64-encodes images in a displaypub format dict
 
     Perhaps this should be handled in json_clean itself?
@@ -69,7 +70,7 @@ def encode_images(format_dict: Dict) -> Dict[str, str]:
     return format_dict
 
 
-def json_clean(obj):
+def json_clean(obj: Any) -> Any:
     """Clean an object to ensure it's safe to encode in JSON.
 
     Atomic, immutable objects are returned unmodified. Sets and tuples are
diff --git a/nbclient/output_widget.py b/nbclient/output_widget.py
index 236bd79..8a9bb85 100644
--- a/nbclient/output_widget.py
+++ b/nbclient/output_widget.py
@@ -1,7 +1,10 @@
 """An output widget mimic."""
-from typing import Any, Dict, List, Optional
+from __future__ import annotations
+
+from typing import Any
 
 from jupyter_client.client import KernelClient
+from nbformat import NotebookNode
 from nbformat.v4 import output_from_msg
 
 from .jsonutil import json_clean
@@ -11,18 +14,18 @@ class OutputWidget:
     """This class mimics a front end output widget"""
 
     def __init__(
-        self, comm_id: str, state: Dict[str, Any], kernel_client: KernelClient, executor: Any
+        self, comm_id: str, state: dict[str, Any], kernel_client: KernelClient, executor: Any
     ) -> None:
         """Initialize the widget."""
         self.comm_id: str = comm_id
-        self.state: Dict[str, Any] = state
+        self.state: dict[str, Any] = state
         self.kernel_client: KernelClient = kernel_client
         self.executor = executor
         self.topic: bytes = ('comm-%s' % self.comm_id).encode('ascii')
-        self.outputs: List = self.state['outputs']
+        self.outputs: list[NotebookNode] = self.state['outputs']
         self.clear_before_next_output: bool = False
 
-    def clear_output(self, outs: List, msg: Dict, cell_index: int) -> None:
+    def clear_output(self, outs: list[NotebookNode], msg: dict[str, Any], cell_index: int) -> None:
         """Clear output."""
         self.parent_header = msg['parent_header']
         content = msg['content']
@@ -45,9 +48,9 @@ def sync_state(self) -> None:
     def _publish_msg(
         self,
         msg_type: str,
-        data: Optional[Dict] = None,
-        metadata: Optional[Dict] = None,
-        buffers: Optional[List] = None,
+        data: dict[str, Any] | None = None,
+        metadata: dict[str, Any] | None = None,
+        buffers: list[Any] | None = None,
         **keys: Any,
     ) -> None:
         """Helper for sending a comm message on IOPub"""
@@ -61,20 +64,22 @@ def _publish_msg(
 
     def send(
         self,
-        data: Optional[Dict] = None,
-        metadata: Optional[Dict] = None,
-        buffers: Optional[List] = None,
+        data: dict[str, Any] | None = None,
+        metadata: dict[str, Any] | None = None,
+        buffers: list[Any] | None = None,
     ) -> None:
         """Send a comm message."""
         self._publish_msg('comm_msg', data=data, metadata=metadata, buffers=buffers)
 
-    def output(self, outs: List, msg: Dict, display_id: str, cell_index: int) -> None:
+    def output(
+        self, outs: list[NotebookNode], msg: dict[str, Any], display_id: str, cell_index: int
+    ) -> None:
         """Handle output."""
         if self.clear_before_next_output:
             self.outputs = []
             self.clear_before_next_output = False
         self.parent_header = msg['parent_header']
-        output = output_from_msg(msg)
+        output = output_from_msg(msg)  # type:ignore[no-untyped-call]
 
         if self.outputs:
             # try to coalesce/merge output text
@@ -94,7 +99,7 @@ def output(self, outs: List, msg: Dict, display_id: str, cell_index: int) -> Non
         # sync the state to the nbconvert state as well, since that is used for testing
         self.executor.widget_state[self.comm_id]['outputs'] = self.outputs
 
-    def set_state(self, state: Dict) -> None:
+    def set_state(self, state: dict[str, Any]) -> None:
         """Set the state."""
         if 'msg_id' in state:
             msg_id = state.get('msg_id')
@@ -105,7 +110,7 @@ def set_state(self, state: Dict) -> None:
                 self.executor.remove_output_hook(self.msg_id, self)
                 self.msg_id = msg_id
 
-    def handle_msg(self, msg: Dict) -> None:
+    def handle_msg(self, msg: dict[str, Any]) -> None:
         """Handle a message."""
         content = msg['content']
         comm_id = content['comm_id']
diff --git a/nbclient/tests/base.py b/nbclient/tests/base.py
index 69ee12c..538b937 100644
--- a/nbclient/tests/base.py
+++ b/nbclient/tests/base.py
@@ -2,6 +2,8 @@
 
 from nbformat import v4 as nbformat
 
+# mypy: disable-error-code="no-untyped-call,no-untyped-def"
+
 
 class NBClientTestsBase(unittest.TestCase):
     def build_notebook(self, with_json_outputs=False):
diff --git a/nbclient/tests/conftest.py b/nbclient/tests/conftest.py
index 7959fcb..6054a03 100644
--- a/nbclient/tests/conftest.py
+++ b/nbclient/tests/conftest.py
@@ -1,6 +1,10 @@
+import asyncio
 import os
 
 # This is important for ipykernel to show the same string
 # instead of randomly generated file names in outputs.
 # See: https://github.com/ipython/ipykernel/blob/360685c6/ipykernel/compiler.py#L50-L55
 os.environ["IPYKERNEL_CELL_NAME"] = ""
+
+if os.name == 'nt' and hasattr(asyncio, 'WindowsSelectorEventLoopPolicy'):
+    asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
diff --git a/nbclient/tests/fake_kernelmanager.py b/nbclient/tests/fake_kernelmanager.py
index de1e83b..167bb81 100644
--- a/nbclient/tests/fake_kernelmanager.py
+++ b/nbclient/tests/fake_kernelmanager.py
@@ -1,5 +1,7 @@
 from jupyter_client.manager import AsyncKernelManager
 
+# mypy: disable-error-code="no-untyped-call,no-untyped-def"
+
 
 class FakeCustomKernelManager(AsyncKernelManager):
     expected_methods = {'__init__': 0, 'client': 0, 'start_kernel': 0}  # noqa
diff --git a/nbclient/tests/test_client.py b/nbclient/tests/test_client.py
index f8b3619..92f1936 100644
--- a/nbclient/tests/test_client.py
+++ b/nbclient/tests/test_client.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import asyncio
 import concurrent.futures
 import copy
@@ -16,10 +18,11 @@
 import nbformat
 import pytest
 import xmltodict
-from flaky import flaky  # type:ignore
-from jupyter_client import KernelClient, KernelManager
+from flaky import flaky  # type:ignore[import]
 from jupyter_client._version import version_info
+from jupyter_client.client import KernelClient
 from jupyter_client.kernelspec import KernelSpecManager
+from jupyter_client.manager import KernelManager
 from nbconvert.filters import strip_ansi
 from nbformat import NotebookNode
 from testpath import modified_env
@@ -30,6 +33,8 @@
 
 from .base import NBClientTestsBase
 
+# mypy: disable-error-code="no-untyped-call,no-untyped-def"
+
 addr_pat = re.compile(r'0x[0-9a-f]{7,9}')
 current_dir = os.path.dirname(__file__)
 ipython_input_pat = re.compile(
@@ -87,13 +92,13 @@ class AsyncMock(Mock):
     pass
 
 
-def make_future(obj: Any) -> asyncio.Future:
+def make_future(obj: Any) -> asyncio.Future[Any]:
     try:
         loop = asyncio.get_running_loop()
     except RuntimeError:
         loop = asyncio.new_event_loop()
         asyncio.set_event_loop(loop)
-    future: asyncio.Future = asyncio.Future(loop=loop)
+    future: asyncio.Future[Any] = asyncio.Future(loop=loop)
     future.set_result(obj)
     return future
 
@@ -303,7 +308,7 @@ def notebook_resources():
 
 def filter_messages_on_error_output(err_output):
     allowed_lines = [
-        # ipykernel migh be installed without debugpy extension
+        # ipykernel might be installed without debugpy extension
         "[IPKernelApp] WARNING | debugpy_stream undefined, debugging will not be enabled",
     ]
     filtered_result = [line for line in err_output.splitlines() if line not in allowed_lines]
@@ -557,7 +562,7 @@ def stop_channels(self) -> None:
         assert str(err.value.args[0]) == "Any error"
         assert executor.kc is None
         assert executor.km is None
-        assert not km.has_kernel
+        assert not km.has_kernel  # type:ignore[unreachable]
 
 
 class TestExecute(NBClientTestsBase):
@@ -680,7 +685,7 @@ def test_kernel_death_after_timeout(self):
         async def is_alive():
             return False
 
-        km.is_alive = is_alive  # type:ignore
+        km.is_alive = is_alive  # type:ignore[method-assign]
         # Will be a RuntimeError, TimeoutError, or subclass DeadKernelError
         # depending
         # on if jupyter_client or nbconvert catches the dead client first
@@ -820,7 +825,7 @@ def test_custom_kernel_manager(self):
            self.assertNotEqual(call_count, 0, f'{method} was called')
 
     def test_process_message_wrapper(self):
-        outputs: list = []
+        outputs: list[Any] = []
 
         class WrappedPreProc(NotebookClient):
             def process_message(self, msg, cell, cell_index):
diff --git a/nbclient/tests/test_util.py b/nbclient/tests/test_util.py
index 2dd8933..b1e6afd 100644
--- a/nbclient/tests/test_util.py
+++ b/nbclient/tests/test_util.py
@@ -6,6 +6,8 @@
 
 from nbclient.util import run_hook, run_sync
 
+# mypy: disable-error-code="no-untyped-call,no-untyped-def"
+
 
 @run_sync
 async def some_async_function():
@@ -36,7 +38,7 @@ def test_nested_asyncio_with_tornado():
     ioloop = tornado.ioloop.IOLoop.current()
 
     async def some_async_function():
-        future: asyncio.Future = asyncio.ensure_future(asyncio.sleep(0.1))
+        future: asyncio.Future[None] = asyncio.ensure_future(asyncio.sleep(0.1))
         # the asyncio module, check if tornado likes it:
         ioloop.add_future(future, lambda f: f.result())
         await future
diff --git a/nbclient/util.py b/nbclient/util.py
index 1a98500..5850cfc 100644
--- a/nbclient/util.py
+++ b/nbclient/util.py
@@ -2,14 +2,17 @@
 # Copyright (c) Jupyter Development Team.
 # Distributed under the terms of the Modified BSD License.
 
+from __future__ import annotations
 
 import inspect
-from typing import Any, Callable, Optional
+from typing import Any, Callable
 
-from jupyter_core.utils import ensure_async, run_sync  # noqa: F401
+from jupyter_core.utils import ensure_async, run_sync
 
+__all__ = ["ensure_async", "run_sync", "run_hook"]
 
-async def run_hook(hook: Optional[Callable], **kwargs: Any) -> None:
+
+async def run_hook(hook: Callable[..., Any] | None, **kwargs: Any) -> None:
     """Run a hook callback."""
     if hook is None:
         return
diff --git a/pyproject.toml b/pyproject.toml
index 2059eae..882a93e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -106,9 +106,9 @@ nowarn = "test -W default {args}"
 
 [tool.hatch.envs.typing]
 features = ["test"]
-dependencies = ["mypy>=0.990"]
+dependencies = ["mypy>=1.5.1", "traitlets>=5.11.2", "jupyter_core>=5.3.2"]
 [tool.hatch.envs.typing.scripts]
-test = "mypy --install-types --non-interactive {args:nbclient}"
+test = "mypy --install-types --non-interactive {args}"
 
 [tool.hatch.envs.lint]
 dependencies = [
@@ -138,7 +138,24 @@ exclude = "/(\n  \\.git\n  | \\.hg\n  | \\.mypy_cache\n  | \\.tox\n  | \\.venv
 skip-string-normalization = true
 
 [tool.pytest.ini_options]
-addopts = "-raXs --durations 10 --color=yes --doctest-modules"
+minversion = "6.0"
+xfail_strict = true
+log_cli_level = "info"
+addopts = [
+  "-raXs", "--durations=10", "--color=yes", "--doctest-modules",
+  "--showlocals", "--strict-markers", "--strict-config"
+]
+testpaths = ["nbclient/tests"]
+filterwarnings= [
+  # Fail on warnings
+  "error",
+  "module:Jupyter is migrating its paths:DeprecationWarning",
+  "module:unclosed