diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 95a76d7..0bfa230 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -23,7 +23,7 @@ repos:
     rev: v3.19.1
     hooks:
     - id: pyupgrade
-      args: [--py37-plus]
+      args: [--py39-plus]

   - repo: https://github.com/pycqa/isort
     rev: 6.0.0
diff --git a/jupyter_cache/base.py b/jupyter_cache/base.py
index 2df9d74..9a10610 100644
--- a/jupyter_cache/base.py
+++ b/jupyter_cache/base.py
@@ -5,9 +5,10 @@
 """
 from abc import ABC, abstractmethod
+from collections.abc import Iterable, Mapping
 import io
 from pathlib import Path
-from typing import Iterable, List, Mapping, Optional, Tuple, Union
+from typing import Optional, Union

 import attr
 from attr.validators import instance_of, optional
@@ -57,7 +58,7 @@ class ProjectNb:
         repr=lambda nb: f"Notebook(cells={len(nb.cells)})",
         metadata={"help": "the notebook"},
     )
-    assets: List[Path] = attr.ib(
+    assets: list[Path] = attr.ib(
         factory=list,
         metadata={"help": "File paths required to run the notebook"},
     )
@@ -68,11 +69,11 @@ class NbArtifactsAbstract(ABC):

     @property
     @abstractmethod
-    def relative_paths(self) -> List[Path]:
+    def relative_paths(self) -> list[Path]:
         """Return the list of paths (relative to the notebook folder)."""

     @abstractmethod
-    def __iter__(self) -> Iterable[Tuple[Path, io.BufferedReader]]:
+    def __iter__(self) -> Iterable[tuple[Path, io.BufferedReader]]:
         """Yield the relative path and open files (in bytes mode)"""

     def __repr__(self):
@@ -165,7 +166,7 @@ def cache_notebook_file(
         self,
         path: str,
         uri: Optional[str] = None,
-        artifacts: List[str] = (),
+        artifacts: list[str] = (),
         data: Optional[dict] = None,
         check_validity: bool = True,
         overwrite: bool = False,
@@ -186,7 +187,7 @@
         """

     @abstractmethod
-    def list_cache_records(self) -> List[NbCacheRecord]:
+    def list_cache_records(self) -> list[NbCacheRecord]:
         """Return a list of cached notebook records."""

     @abstractmethod
@@ -229,7 +230,7 @@ def merge_match_into_notebook(
         nb: nbf.NotebookNode,
         nb_meta=("kernelspec", "language_info", "widgets"),
         cell_meta=None,
-    ) -> Tuple[int, nbf.NotebookNode]:
+    ) -> tuple[int, nbf.NotebookNode]:
         """Match to an executed notebook and return a merged version

         :param nb: The input notebook
@@ -244,7 +245,7 @@ def merge_match_into_file(
         path: str,
         nb_meta=("kernelspec", "language_info", "widgets"),
         cell_meta=None,
-    ) -> Tuple[int, nbf.NotebookNode]:
+    ) -> tuple[int, nbf.NotebookNode]:
         """Match to an executed notebook and return a merged version

         :param path: The input notebook path
@@ -281,7 +282,7 @@ def add_nb_to_project(
         uri: str,
         *,
         read_data: Mapping = DEFAULT_READ_DATA,
-        assets: List[str] = (),
+        assets: list[str] = (),
     ) -> NbProjectRecord:
         """Add a single notebook to the project.
@@ -298,9 +299,9 @@ def remove_nb_from_project(self, uri_or_pk: Union[int, str]):

     @abstractmethod
     def list_project_records(
         self,
-        filter_uris: Optional[List[str]] = None,
-        filter_pks: Optional[List[int]] = None,
-    ) -> List[NbProjectRecord]:
+        filter_uris: Optional[list[str]] = None,
+        filter_pks: Optional[list[int]] = None,
+    ) -> list[NbProjectRecord]:
         """Return a list of all notebook records in the project."""

     @abstractmethod
@@ -326,7 +327,7 @@ def get_cached_project_nb(
     @abstractmethod
     def list_unexecuted(
         self,
-        filter_uris: Optional[List[str]] = None,
-        filter_pks: Optional[List[int]] = None,
-    ) -> List[NbProjectRecord]:
+        filter_uris: Optional[list[str]] = None,
+        filter_pks: Optional[list[int]] = None,
+    ) -> list[NbProjectRecord]:
         """List notebooks in the project, whose hash is not present in the cache."""
diff --git a/jupyter_cache/cache/db.py b/jupyter_cache/cache/db.py
index de39943..31057a3 100644
--- a/jupyter_cache/cache/db.py
+++ b/jupyter_cache/cache/db.py
@@ -2,7 +2,7 @@
 import datetime
 import os
 from pathlib import Path
-from typing import Any, Dict, List, Optional, Union
+from typing import Any, Optional, Union

 from sqlalchemy import JSON, Column, DateTime, Integer, String, Text
 from sqlalchemy.engine import Engine, create_engine
@@ -203,7 +203,7 @@ def validate_assets(paths, uri=None):
     def create_record(
         uri: str,
         db: Engine,
-        read_data: Dict[str, Any],
+        read_data: dict[str, Any],
         raise_on_exists=True,
         *,
         assets=(),
@@ -222,14 +222,14 @@
             session.expunge(record)
         return record

-    def remove_pks(pks: List[int], db: Engine):
+    def remove_pks(pks: list[int], db: Engine):
         with session_context(db) as session:  # type: Session
             session.query(NbProjectRecord).filter(NbProjectRecord.pk.in_(pks)).delete(
                 synchronize_session=False
             )
             session.commit()

-    def remove_uris(uris: List[str], db: Engine):
+    def remove_uris(uris: list[str], db: Engine):
         with session_context(db) as session:  # type: Session
             session.query(NbProjectRecord).filter(NbProjectRecord.uri.in_(uris)).delete(
                 synchronize_session=False
@@ -339,7 +339,7 @@ def remove_record(pk: int, db: Engine):
             session.delete(record)
             session.commit()

-    def remove_records(pks: List[int], db: Engine):
+    def remove_records(pks: list[int], db: Engine):
         with session_context(db) as session:  # type: Session
             session.query(NbCacheRecord).filter(NbCacheRecord.pk.in_(pks)).delete(
                 synchronize_session=False
@@ -400,7 +400,7 @@ def records_all(db: Engine) -> "NbCacheRecord":
             session.expunge_all()
         return results

-    def records_to_delete(keep: int, db: Engine) -> List[int]:
+    def records_to_delete(keep: int, db: Engine) -> list[int]:
         """Return pks of the oldest records, where keep is number to keep."""
         with session_context(db) as session:  # type: Session
             pks_to_keep = [
diff --git a/jupyter_cache/cache/main.py b/jupyter_cache/cache/main.py
index ea63dbe..fa67cd8 100644
--- a/jupyter_cache/cache/main.py
+++ b/jupyter_cache/cache/main.py
@@ -1,10 +1,11 @@
+from collections.abc import Iterable, Mapping
 from contextlib import contextmanager
 import copy
 import hashlib
 import io
 from pathlib import Path
 import shutil
-from typing import Iterable, List, Mapping, Optional, Tuple, Union
+from typing import Optional, Union

 import nbformat as nbf
@@ -31,7 +32,7 @@ class NbArtifacts(NbArtifactsAbstract):
     """Container for artefacts of a notebook execution."""

-    def __init__(self, paths: List[str], in_folder, check_existence=True):
+    def __init__(self, paths: list[str], in_folder, check_existence=True):
         """Initiate NbArtifacts

         :param paths: list of paths
@@ -44,11 +45,11 @@ def __init__(self, paths: List[str], in_folder, check_existence=True):
         to_relative_paths(self.paths, self.in_folder, check_existence=check_existence)

     @property
-    def relative_paths(self) -> List[Path]:
+    def relative_paths(self) -> list[Path]:
         """Return the list of paths (relative to the notebook folder)."""
         return to_relative_paths(self.paths, self.in_folder)

-    def __iter__(self) -> Iterable[Tuple[Path, io.BufferedReader]]:
+    def __iter__(self) -> Iterable[tuple[Path, io.BufferedReader]]:
         """Yield the relative path and open files (in bytes mode)"""
         for path in self.paths:
             with path.open("rb") as handle:
@@ -123,7 +124,7 @@ def create_hashed_notebook(
         nb: nbf.NotebookNode,
         nb_metadata: Optional[Iterable[str]] = ("kernelspec",),
         cell_metadata: Optional[Iterable[str]] = None,
-    ) -> Tuple[nbf.NotebookNode, str]:
+    ) -> tuple[nbf.NotebookNode, str]:
         """Convert a notebook to a standard format and hash.

         Note: we always hash notebooks as version 4.4,
@@ -254,7 +255,7 @@ def cache_notebook_file(
         self,
         path: str,
         uri: Optional[str] = None,
-        artifacts: List[str] = (),
+        artifacts: list[str] = (),
         data: Optional[dict] = None,
         check_validity: bool = True,
         overwrite: bool = False,
@@ -285,7 +286,7 @@
             overwrite=overwrite,
         )

-    def list_cache_records(self) -> List[NbCacheRecord]:
+    def list_cache_records(self) -> list[NbCacheRecord]:
         return NbCacheRecord.records_all(self.db)

     def get_cache_record(self, pk: int) -> NbCacheRecord:
@@ -343,7 +344,7 @@ def merge_match_into_notebook(
         nb: nbf.NotebookNode,
         nb_meta: Optional[Iterable[str]] = ("kernelspec", "language_info", "widgets"),
         cell_meta: Optional[Iterable[str]] = None,
-    ) -> Tuple[int, nbf.NotebookNode]:
+    ) -> tuple[int, nbf.NotebookNode]:
         """Match to an executed notebook and return a merged version

         :param nb: The input notebook
@@ -413,7 +414,7 @@ def add_nb_to_project(
         path: str,
         *,
         read_data: Mapping = DEFAULT_READ_DATA,
-        assets: List[str] = (),
+        assets: list[str] = (),
     ) -> NbProjectRecord:
         # check the reader can be loaded
         read_data = dict(read_data)
@@ -431,9 +432,9 @@

     def list_project_records(
         self,
-        filter_uris: Optional[List[str]] = None,
-        filter_pks: Optional[List[int]] = None,
-    ) -> List[NbProjectRecord]:
+        filter_uris: Optional[list[str]] = None,
+        filter_pks: Optional[list[int]] = None,
+    ) -> list[NbProjectRecord]:
         records = NbProjectRecord.records_all(self.db)
         if filter_uris is not None:
             records = [r for r in records if r.uri in filter_uris]
@@ -487,9 +488,9 @@ def get_cached_project_nb(
     def list_unexecuted(
         self,
-        filter_uris: Optional[List[str]] = None,
-        filter_pks: Optional[List[int]] = None,
-    ) -> List[NbProjectRecord]:
+        filter_uris: Optional[list[str]] = None,
+        filter_pks: Optional[list[int]] = None,
+    ) -> list[NbProjectRecord]:
         records = []
         for record in self.list_project_records(filter_uris, filter_pks):
             nb = self.get_project_notebook(record.uri).nb
diff --git a/jupyter_cache/entry_points.py b/jupyter_cache/entry_points.py
index 22ccd3f..51ca6ed 100644
--- a/jupyter_cache/entry_points.py
+++ b/jupyter_cache/entry_points.py
@@ -1,6 +1,6 @@
 """Module for dealing with entry points."""

-from typing import Optional, Set
+from typing import Optional

 # TODO importlib.metadata was introduced into the standard library in python 3.8
 # so we can change this when we drop support for 3.7
@@ -14,7 +14,7 @@
 ENTRY_POINT_GROUP_EXEC = "jcache.executors"


-def list_group_names(group: str) -> Set[str]:
+def list_group_names(group: str) -> set[str]:
     """Return the entry points within a group."""
     all_eps = eps()
     try:
diff --git a/jupyter_cache/executors/base.py b/jupyter_cache/executors/base.py
index 26e35cd..7968c34 100644
--- a/jupyter_cache/executors/base.py
+++ b/jupyter_cache/executors/base.py
@@ -1,6 +1,6 @@
 from abc import ABC, abstractmethod
 import logging
-from typing import Any, Dict, List, Optional, Set
+from typing import Any, Optional

 import attr
@@ -24,17 +24,17 @@ class ExecutorRunResult:
     """A container for the execution result."""

     # URIs of notebooks which where successfully executed
-    succeeded: List[str] = attr.ib(factory=list)
+    succeeded: list[str] = attr.ib(factory=list)
     # URIs of notebooks which excepted during execution
-    excepted: List[str] = attr.ib(factory=list)
+    excepted: list[str] = attr.ib(factory=list)
     # URIs of notebooks which errored before execution
-    errored: List[str] = attr.ib(factory=list)
+    errored: list[str] = attr.ib(factory=list)

-    def all(self) -> List[str]:
+    def all(self) -> list[str]:
         """Return all notebooks."""
         return self.succeeded + self.excepted + self.errored

-    def as_json(self) -> Dict[str, Any]:
+    def as_json(self) -> dict[str, Any]:
         """Return the result as a JSON serializable dict."""
         return {
             "succeeded": self.succeeded,
@@ -63,11 +63,11 @@ def logger(self):

     def get_records(
         self,
-        filter_uris: Optional[List[str]] = None,
-        filter_pks: Optional[List[int]] = None,
+        filter_uris: Optional[list[str]] = None,
+        filter_pks: Optional[list[int]] = None,
         clear_tracebacks: bool = True,
         force: bool = False,
-    ) -> List[NbProjectRecord]:
+    ) -> list[NbProjectRecord]:
         """Return records to execute.

         :param clear_tracebacks: Remove any tracebacks from previous executions
@@ -86,8 +86,8 @@ def get_records(
     def run_and_cache(
         self,
         *,
-        filter_uris: Optional[List[str]] = None,
-        filter_pks: Optional[List[int]] = None,
+        filter_uris: Optional[list[str]] = None,
+        filter_pks: Optional[list[int]] = None,
         timeout: Optional[int] = 30,
         allow_errors: bool = False,
         force: bool = False,
@@ -105,7 +105,7 @@
         """


-def list_executors() -> Set[str]:
+def list_executors() -> set[str]:
     return list_group_names(ENTRY_POINT_GROUP_EXEC)
diff --git a/jupyter_cache/executors/basic.py b/jupyter_cache/executors/basic.py
index 90d1911..4c19f61 100644
--- a/jupyter_cache/executors/basic.py
+++ b/jupyter_cache/executors/basic.py
@@ -3,7 +3,7 @@
 import os
 from pathlib import Path
 import tempfile
-from typing import NamedTuple, Tuple
+from typing import NamedTuple

 from jupyter_cache.base import JupyterCacheAbstract, ProjectNb
 from jupyter_cache.cache.db import NbProjectRecord
@@ -45,7 +45,7 @@ def log_info(self, msg: str):
     def execute(self, project_nb: ProjectNb, data: ProcessData) -> ExecutionResult:
         raise NotImplementedError

-    def __call__(self, data: ProcessData) -> Tuple[int, str]:
+    def __call__(self, data: ProcessData) -> tuple[int, str]:
         try:
             project_nb = data.cache.get_project_notebook(data.pk)
         except Exception:
diff --git a/jupyter_cache/executors/utils.py b/jupyter_cache/executors/utils.py
index 0a9f492..50b7c58 100644
--- a/jupyter_cache/executors/utils.py
+++ b/jupyter_cache/executors/utils.py
@@ -1,7 +1,7 @@
 from pathlib import Path
 import shutil
 import traceback
-from typing import Any, List, Optional, Union
+from typing import Any, Optional, Union

 import attr
 from nbclient import execute as executenb
@@ -70,7 +70,7 @@ def single_nb_execution(
     return ExecutionResult(nb, cwd, timer.last_split, error, exc_string)


-def copy_assets(uri: str, assets: List[str], folder: str) -> List[Path]:
+def copy_assets(uri: str, assets: list[str], folder: str) -> list[Path]:
     """Copy notebook assets to the folder the notebook will be executed in."""
     asset_files = []
     relative_paths = to_relative_paths(assets, Path(uri).parent)
@@ -85,7 +85,7 @@ def copy_assets(uri: str, assets: List[str], folder: str) -> List[Path]:
 def create_cache_bundle(
     project_nb: ProjectNb,
     execdir: Optional[str],
-    asset_files: Optional[List[Path]],
+    asset_files: Optional[list[Path]],
     exec_time: float,
     exec_tb: Optional[str],
 ) -> CacheBundleIn:
diff --git a/jupyter_cache/readers.py b/jupyter_cache/readers.py
index a246f68..ee17170 100644
--- a/jupyter_cache/readers.py
+++ b/jupyter_cache/readers.py
@@ -1,6 +1,6 @@
 """Module for handling different functions to read "notebook-like" files."""

-from typing import Any, Callable, Dict, Set
+from typing import Any, Callable

 import nbformat as nbf
@@ -23,12 +23,12 @@ def jupytext_reader(uri: str) -> nbf.NotebookNode:
         return jupytext.read(uri)


-def list_readers() -> Set[str]:
+def list_readers() -> set[str]:
     """List all available readers."""
     return list_group_names(ENTRY_POINT_GROUP_READER)


-def get_reader(data: Dict[str, Any]) -> Callable[[str], nbf.NotebookNode]:
+def get_reader(data: dict[str, Any]) -> Callable[[str], nbf.NotebookNode]:
     """Returns a function to read a file URI and return a notebook."""
     if data.get("type") == "plugin":
         key = data.get("name", "")
diff --git a/jupyter_cache/utils.py b/jupyter_cache/utils.py
index a0aa348..f741d52 100644
--- a/jupyter_cache/utils.py
+++ b/jupyter_cache/utils.py
@@ -2,7 +2,7 @@
 from pathlib import Path
 import time
-from typing import TYPE_CHECKING, List, Optional, Union
+from typing import TYPE_CHECKING, Optional, Union

 from jupyter_cache.readers import NbReadError
@@ -12,10 +12,10 @@
 def to_relative_paths(
-    paths: List[Union[str, Path]],
+    paths: list[Union[str, Path]],
     folder: Union[str, Path],
     check_existence: bool = False,
-) -> List[Path]:
+) -> list[Path]:
     """Make paths relative to a reference folder.

     :param paths: list of paths
@@ -80,7 +80,7 @@ def shorten_path(file_path: Union[str, Path], length: Optional[int]) -> Path:


 def tabulate_cache_records(
-    records: List["NbCacheRecord"], hashkeys=False, path_length=None
+    records: list["NbCacheRecord"], hashkeys=False, path_length=None
 ) -> str:
     """Tabulate cache records.
@@ -100,7 +100,7 @@

 def tabulate_project_records(
-    records: List["NbProjectRecord"],
+    records: list["NbProjectRecord"],
     path_length: Optional[int] = None,
     cache: Optional["JupyterCacheAbstract"] = None,
     assets=False,
diff --git a/tox.ini b/tox.ini
index cda58fa..394678c 100644
--- a/tox.ini
+++ b/tox.ini
@@ -11,12 +11,12 @@
 # then then deleting compiled files has been found to fix it: `find . -name \*.pyc -delete`

 [tox]
-envlist = py38
+envlist = py39

 [testenv]
 usedevelop = true

-[testenv:py{37,38,39,310}]
+[testenv:py{39,310}]
 extras = testing
 deps =
     black
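
Note: every hunk above applies the same migration, enabled by dropping Python 3.7/3.8 support. PEP 585 (Python 3.9) lets the builtin containers (list, dict, tuple, set) be subscripted directly as generic types, and Iterable/Mapping move from typing to collections.abc. Optional and Union still come from typing, since the X | Y union syntax only arrives in Python 3.10 (PEP 604). A minimal before/after sketch of the pattern; the summarise helper is illustrative only, not part of this codebase:

from collections.abc import Iterable
from pathlib import Path
from typing import Optional

# Before (Python 3.7/3.8 style):
#     from typing import Iterable, List, Optional, Tuple
#     def summarise(paths: Iterable[Path], limit: Optional[int] = None) -> Tuple[int, List[str]]: ...

# After (Python 3.9+, PEP 585 builtin generics):
def summarise(paths: Iterable[Path], limit: Optional[int] = None) -> tuple[int, list[str]]:
    """Count paths and list their names (hypothetical example, not from this repo)."""
    names = [p.name for p in paths]
    return len(names), names[:limit]

Bumping the pyupgrade hook to --py39-plus keeps this consistent going forward: pyupgrade performs these rewrites automatically (e.g. List[int] to list[int]), so later commits cannot reintroduce the old spellings.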