MAINT: run pyupgrade for py39 (#135)
* MAINT: running pyupgrade for 3.9

* MAINT: bumping python version in pre-commit

* MAINT: style cleanup

* MAINT: remove old python
bsipocz authored Feb 5, 2025
1 parent 457f67a commit dd7a38c
Showing 11 changed files with 68 additions and 66 deletions.
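For context, the rewrite `pyupgrade --py39-plus` applies across these files follows PEP 585 (Python 3.9): the builtin containers are now usable as generics, so the deprecated `typing` aliases and their imports can be dropped. A minimal before/after sketch (illustrative only, not from this repo):

```python
# Before, py37-era typing:
#
#   from typing import Dict, Iterable, List, Tuple
#
#   def index(items: Iterable[str]) -> Dict[str, List[Tuple[int, str]]]: ...
#
# After --py39-plus: builtin generics (PEP 585), with the abstract
# collection types imported from collections.abc rather than typing.
from collections.abc import Iterable


def index(items: Iterable[str]) -> dict[str, list[tuple[int, str]]]:
    out: dict[str, list[tuple[int, str]]] = {}
    for i, item in enumerate(items):
        # bucket each item by its first character, keeping its position
        out.setdefault(item[:1], []).append((i, item))
    return out
```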
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -23,7 +23,7 @@ repos:
     rev: v3.19.1
     hooks:
       - id: pyupgrade
-        args: [--py37-plus]
+        args: [--py39-plus]
 
   - repo: https://github.com/pycqa/isort
     rev: 6.0.0
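With the hook's floor bumped, the rest of the commit is presumably the output of one pass of the hook over the tree, e.g. `pre-commit run pyupgrade --all-files`; `pre-commit autoupdate` is the usual way to refresh `rev` pins such as `v3.19.1`.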
31 changes: 16 additions & 15 deletions jupyter_cache/base.py
@@ -5,9 +5,10 @@
 """
 
 from abc import ABC, abstractmethod
+from collections.abc import Iterable, Mapping
 import io
 from pathlib import Path
-from typing import Iterable, List, Mapping, Optional, Tuple, Union
+from typing import Optional, Union
 
 import attr
 from attr.validators import instance_of, optional
@@ -57,7 +58,7 @@ class ProjectNb:
         repr=lambda nb: f"Notebook(cells={len(nb.cells)})",
         metadata={"help": "the notebook"},
     )
-    assets: List[Path] = attr.ib(
+    assets: list[Path] = attr.ib(
         factory=list,
         metadata={"help": "File paths required to run the notebook"},
     )
@@ -68,11 +69,11 @@ class NbArtifactsAbstract(ABC):
 
     @property
     @abstractmethod
-    def relative_paths(self) -> List[Path]:
+    def relative_paths(self) -> list[Path]:
         """Return the list of paths (relative to the notebook folder)."""
 
     @abstractmethod
-    def __iter__(self) -> Iterable[Tuple[Path, io.BufferedReader]]:
+    def __iter__(self) -> Iterable[tuple[Path, io.BufferedReader]]:
         """Yield the relative path and open files (in bytes mode)"""
 
     def __repr__(self):
@@ -165,7 +166,7 @@ def cache_notebook_file(
         self,
         path: str,
         uri: Optional[str] = None,
-        artifacts: List[str] = (),
+        artifacts: list[str] = (),
         data: Optional[dict] = None,
         check_validity: bool = True,
         overwrite: bool = False,
@@ -186,7 +187,7 @@ def cache_notebook_file(
         """
 
     @abstractmethod
-    def list_cache_records(self) -> List[NbCacheRecord]:
+    def list_cache_records(self) -> list[NbCacheRecord]:
         """Return a list of cached notebook records."""
 
     @abstractmethod
@@ -229,7 +230,7 @@ def merge_match_into_notebook(
         nb: nbf.NotebookNode,
         nb_meta=("kernelspec", "language_info", "widgets"),
         cell_meta=None,
-    ) -> Tuple[int, nbf.NotebookNode]:
+    ) -> tuple[int, nbf.NotebookNode]:
         """Match to an executed notebook and return a merged version
 
         :param nb: The input notebook
@@ -244,7 +245,7 @@ def merge_match_into_file(
         path: str,
         nb_meta=("kernelspec", "language_info", "widgets"),
         cell_meta=None,
-    ) -> Tuple[int, nbf.NotebookNode]:
+    ) -> tuple[int, nbf.NotebookNode]:
         """Match to an executed notebook and return a merged version
 
         :param path: The input notebook path
@@ -281,7 +282,7 @@ def add_nb_to_project(
         uri: str,
         *,
         read_data: Mapping = DEFAULT_READ_DATA,
-        assets: List[str] = (),
+        assets: list[str] = (),
     ) -> NbProjectRecord:
         """Add a single notebook to the project.
@@ -298,9 +299,9 @@ def remove_nb_from_project(self, uri_or_pk: Union[int, str]):
     @abstractmethod
     def list_project_records(
         self,
-        filter_uris: Optional[List[str]] = None,
-        filter_pks: Optional[List[int]] = None,
-    ) -> List[NbProjectRecord]:
+        filter_uris: Optional[list[str]] = None,
+        filter_pks: Optional[list[int]] = None,
+    ) -> list[NbProjectRecord]:
         """Return a list of all notebook records in the project."""
 
     @abstractmethod
@@ -326,7 +327,7 @@ def get_cached_project_nb(
     @abstractmethod
     def list_unexecuted(
         self,
-        filter_uris: Optional[List[str]] = None,
-        filter_pks: Optional[List[int]] = None,
-    ) -> List[NbProjectRecord]:
+        filter_uris: Optional[list[str]] = None,
+        filter_pks: Optional[list[int]] = None,
+    ) -> list[NbProjectRecord]:
         """List notebooks in the project, whose hash is not present in the cache."""
12 changes: 6 additions & 6 deletions jupyter_cache/cache/db.py
@@ -2,7 +2,7 @@
 import datetime
 import os
 from pathlib import Path
-from typing import Any, Dict, List, Optional, Union
+from typing import Any, Optional, Union
 
 from sqlalchemy import JSON, Column, DateTime, Integer, String, Text
 from sqlalchemy.engine import Engine, create_engine
@@ -203,7 +203,7 @@ def validate_assets(paths, uri=None):
     def create_record(
         uri: str,
         db: Engine,
-        read_data: Dict[str, Any],
+        read_data: dict[str, Any],
         raise_on_exists=True,
         *,
         assets=(),
@@ -222,14 +222,14 @@ def create_record(
         session.expunge(record)
         return record
 
-    def remove_pks(pks: List[int], db: Engine):
+    def remove_pks(pks: list[int], db: Engine):
         with session_context(db) as session:  # type: Session
             session.query(NbProjectRecord).filter(NbProjectRecord.pk.in_(pks)).delete(
                 synchronize_session=False
             )
             session.commit()
 
-    def remove_uris(uris: List[str], db: Engine):
+    def remove_uris(uris: list[str], db: Engine):
         with session_context(db) as session:  # type: Session
             session.query(NbProjectRecord).filter(NbProjectRecord.uri.in_(uris)).delete(
                 synchronize_session=False
@@ -339,7 +339,7 @@ def remove_record(pk: int, db: Engine):
             session.delete(record)
             session.commit()
 
-    def remove_records(pks: List[int], db: Engine):
+    def remove_records(pks: list[int], db: Engine):
         with session_context(db) as session:  # type: Session
             session.query(NbCacheRecord).filter(NbCacheRecord.pk.in_(pks)).delete(
                 synchronize_session=False
@@ -400,7 +400,7 @@ def records_all(db: Engine) -> "NbCacheRecord":
             session.expunge_all()
             return results
 
-    def records_to_delete(keep: int, db: Engine) -> List[int]:
+    def records_to_delete(keep: int, db: Engine) -> list[int]:
         """Return pks of the oldest records, where keep is number to keep."""
         with session_context(db) as session:  # type: Session
             pks_to_keep = [
31 changes: 16 additions & 15 deletions jupyter_cache/cache/main.py
@@ -1,10 +1,11 @@
+from collections.abc import Iterable, Mapping
 from contextlib import contextmanager
 import copy
 import hashlib
 import io
 from pathlib import Path
 import shutil
-from typing import Iterable, List, Mapping, Optional, Tuple, Union
+from typing import Optional, Union
 
 import nbformat as nbf
 
@@ -31,7 +32,7 @@
 class NbArtifacts(NbArtifactsAbstract):
     """Container for artefacts of a notebook execution."""
 
-    def __init__(self, paths: List[str], in_folder, check_existence=True):
+    def __init__(self, paths: list[str], in_folder, check_existence=True):
         """Initiate NbArtifacts
 
         :param paths: list of paths
@@ -44,11 +45,11 @@ def __init__(self, paths: List[str], in_folder, check_existence=True):
         to_relative_paths(self.paths, self.in_folder, check_existence=check_existence)
 
     @property
-    def relative_paths(self) -> List[Path]:
+    def relative_paths(self) -> list[Path]:
         """Return the list of paths (relative to the notebook folder)."""
         return to_relative_paths(self.paths, self.in_folder)
 
-    def __iter__(self) -> Iterable[Tuple[Path, io.BufferedReader]]:
+    def __iter__(self) -> Iterable[tuple[Path, io.BufferedReader]]:
         """Yield the relative path and open files (in bytes mode)"""
         for path in self.paths:
             with path.open("rb") as handle:
@@ -123,7 +124,7 @@ def create_hashed_notebook(
         nb: nbf.NotebookNode,
         nb_metadata: Optional[Iterable[str]] = ("kernelspec",),
         cell_metadata: Optional[Iterable[str]] = None,
-    ) -> Tuple[nbf.NotebookNode, str]:
+    ) -> tuple[nbf.NotebookNode, str]:
         """Convert a notebook to a standard format and hash.
 
         Note: we always hash notebooks as version 4.4,
@@ -254,7 +255,7 @@ def cache_notebook_file(
         self,
         path: str,
         uri: Optional[str] = None,
-        artifacts: List[str] = (),
+        artifacts: list[str] = (),
         data: Optional[dict] = None,
         check_validity: bool = True,
         overwrite: bool = False,
@@ -285,7 +286,7 @@ def cache_notebook_file(
             overwrite=overwrite,
         )
 
-    def list_cache_records(self) -> List[NbCacheRecord]:
+    def list_cache_records(self) -> list[NbCacheRecord]:
         return NbCacheRecord.records_all(self.db)
 
     def get_cache_record(self, pk: int) -> NbCacheRecord:
@@ -343,7 +344,7 @@ def merge_match_into_notebook(
         nb: nbf.NotebookNode,
         nb_meta: Optional[Iterable[str]] = ("kernelspec", "language_info", "widgets"),
         cell_meta: Optional[Iterable[str]] = None,
-    ) -> Tuple[int, nbf.NotebookNode]:
+    ) -> tuple[int, nbf.NotebookNode]:
         """Match to an executed notebook and return a merged version
 
         :param nb: The input notebook
@@ -413,7 +414,7 @@ def add_nb_to_project(
         path: str,
         *,
         read_data: Mapping = DEFAULT_READ_DATA,
-        assets: List[str] = (),
+        assets: list[str] = (),
     ) -> NbProjectRecord:
         # check the reader can be loaded
         read_data = dict(read_data)
@@ -431,9 +432,9 @@ def add_nb_to_project(
 
     def list_project_records(
         self,
-        filter_uris: Optional[List[str]] = None,
-        filter_pks: Optional[List[int]] = None,
-    ) -> List[NbProjectRecord]:
+        filter_uris: Optional[list[str]] = None,
+        filter_pks: Optional[list[int]] = None,
+    ) -> list[NbProjectRecord]:
         records = NbProjectRecord.records_all(self.db)
         if filter_uris is not None:
             records = [r for r in records if r.uri in filter_uris]
@@ -487,9 +488,9 @@ def get_cached_project_nb(
 
     def list_unexecuted(
         self,
-        filter_uris: Optional[List[str]] = None,
-        filter_pks: Optional[List[int]] = None,
-    ) -> List[NbProjectRecord]:
+        filter_uris: Optional[list[str]] = None,
+        filter_pks: Optional[list[int]] = None,
+    ) -> list[NbProjectRecord]:
         records = []
         for record in self.list_project_records(filter_uris, filter_pks):
             nb = self.get_project_notebook(record.uri).nb
4 changes: 2 additions & 2 deletions jupyter_cache/entry_points.py
@@ -1,6 +1,6 @@
 """Module for dealing with entry points."""
 
-from typing import Optional, Set
+from typing import Optional
 
 # TODO importlib.metadata was introduced into the standard library in python 3.8
 # so we can change this when we drop support for 3.7
@@ -14,7 +14,7 @@
 ENTRY_POINT_GROUP_EXEC = "jcache.executors"
 
 
-def list_group_names(group: str) -> Set[str]:
+def list_group_names(group: str) -> set[str]:
     """Return the entry points within a group."""
     all_eps = eps()
     try:
24 changes: 12 additions & 12 deletions jupyter_cache/executors/base.py
@@ -1,6 +1,6 @@
 from abc import ABC, abstractmethod
 import logging
-from typing import Any, Dict, List, Optional, Set
+from typing import Any, Optional
 
 import attr
 
@@ -24,17 +24,17 @@ class ExecutorRunResult:
     """A container for the execution result."""
 
     # URIs of notebooks which where successfully executed
-    succeeded: List[str] = attr.ib(factory=list)
+    succeeded: list[str] = attr.ib(factory=list)
     # URIs of notebooks which excepted during execution
-    excepted: List[str] = attr.ib(factory=list)
+    excepted: list[str] = attr.ib(factory=list)
     # URIs of notebooks which errored before execution
-    errored: List[str] = attr.ib(factory=list)
+    errored: list[str] = attr.ib(factory=list)
 
-    def all(self) -> List[str]:
+    def all(self) -> list[str]:
         """Return all notebooks."""
         return self.succeeded + self.excepted + self.errored
 
-    def as_json(self) -> Dict[str, Any]:
+    def as_json(self) -> dict[str, Any]:
         """Return the result as a JSON serializable dict."""
         return {
             "succeeded": self.succeeded,
@@ -63,11 +63,11 @@ def logger(self):
 
     def get_records(
         self,
-        filter_uris: Optional[List[str]] = None,
-        filter_pks: Optional[List[int]] = None,
+        filter_uris: Optional[list[str]] = None,
+        filter_pks: Optional[list[int]] = None,
         clear_tracebacks: bool = True,
         force: bool = False,
-    ) -> List[NbProjectRecord]:
+    ) -> list[NbProjectRecord]:
         """Return records to execute.
 
         :param clear_tracebacks: Remove any tracebacks from previous executions
@@ -86,8 +86,8 @@ def get_records(
     def run_and_cache(
         self,
         *,
-        filter_uris: Optional[List[str]] = None,
-        filter_pks: Optional[List[int]] = None,
+        filter_uris: Optional[list[str]] = None,
+        filter_pks: Optional[list[int]] = None,
         timeout: Optional[int] = 30,
         allow_errors: bool = False,
         force: bool = False,
@@ -105,7 +105,7 @@
         """
 
 
-def list_executors() -> Set[str]:
+def list_executors() -> set[str]:
     return list_group_names(ENTRY_POINT_GROUP_EXEC)
 
 
4 changes: 2 additions & 2 deletions jupyter_cache/executors/basic.py
@@ -3,7 +3,7 @@
 import os
 from pathlib import Path
 import tempfile
-from typing import NamedTuple, Tuple
+from typing import NamedTuple
 
 from jupyter_cache.base import JupyterCacheAbstract, ProjectNb
 from jupyter_cache.cache.db import NbProjectRecord
@@ -45,7 +45,7 @@ def log_info(self, msg: str):
     def execute(self, project_nb: ProjectNb, data: ProcessData) -> ExecutionResult:
         raise NotImplementedError
 
-    def __call__(self, data: ProcessData) -> Tuple[int, str]:
+    def __call__(self, data: ProcessData) -> tuple[int, str]:
         try:
             project_nb = data.cache.get_project_notebook(data.pk)
         except Exception: