Commit 99aa9da

Merge branch 'main' into harmony_doc_fix
flying-sheep authored Nov 19, 2024
2 parents 6034b37 + 0f32b08 commit 99aa9da
Showing 31 changed files with 126 additions and 83 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -28,6 +28,7 @@
# Python build files
__pycache__/
/src/scanpy/_version.py
/ci/scanpy-min-deps.txt
/dist/
/*-env/
/env-*/
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.7.3
rev: v0.7.4
hooks:
- id: ruff
types_or: [python, pyi, jupyter]
37 changes: 29 additions & 8 deletions ci/scripts/min-deps.py
@@ -11,6 +11,7 @@
import argparse
import sys
from collections import deque
from contextlib import ExitStack
from pathlib import Path
from typing import TYPE_CHECKING

@@ -23,7 +24,7 @@
from packaging.version import Version

if TYPE_CHECKING:
from collections.abc import Generator, Iterable
from collections.abc import Generator, Iterable, Sequence


def min_dep(req: Requirement) -> Requirement:
@@ -34,7 +35,7 @@ def min_dep(req: Requirement) -> Requirement:
-------
>>> min_dep(Requirement("numpy>=1.0"))
"numpy==1.0"
<Requirement('numpy==1.0.*')>
"""
req_name = req.name
if req.extras:
@@ -75,12 +76,19 @@ def extract_min_deps(
yield min_dep(req)


def main():
class Args(argparse.Namespace):
path: Path
output: Path | None
extras: list[str]


def main(argv: Sequence[str] | None = None) -> None:
parser = argparse.ArgumentParser(
prog="min-deps",
description="""Parse a pyproject.toml file and output a list of minimum dependencies.
Output is directly passable to `pip install`.""",
description=(
"Parse a pyproject.toml file and output a list of minimum dependencies. "
"Output is optimized for `[uv] pip install` (see `-o`/`--output` for details)."
),
usage="pip install `python min-deps.py pyproject.toml`",
)
parser.add_argument(
@@ -89,8 +97,18 @@ def main():
parser.add_argument(
"--extras", type=str, nargs="*", default=(), help="extras to install"
)
parser.add_argument(
*("--output", "-o"),
type=Path,
default=None,
help=(
"output file (default: stdout). "
"Without this option, output is space-separated for direct passing to `pip install`. "
"With this option, output written to a file newline-separated file usable as `requirements.txt` or `constraints.txt`."
),
)

args = parser.parse_args()
args = parser.parse_args(argv, Args())

pyproject = tomllib.loads(args.path.read_text())

@@ -102,7 +120,10 @@

min_deps = extract_min_deps(deps, pyproject=pyproject)

print(" ".join(map(str, min_deps)))
sep = "\n" if args.output else " "
with ExitStack() as stack:
f = stack.enter_context(args.output.open("w")) if args.output else sys.stdout
print(sep.join(map(str, min_deps)), file=f)


if __name__ == "__main__":
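
The new `argv` parameter makes `main()` callable with an explicit argument list, and `-o`/`--output` switches between space-separated stdout output and a newline-separated constraints file (the `ExitStack` lets the same `print()` target either stdout or the opened file). A minimal sketch of both call styles, assuming the script has been imported as a module named `min_deps` (it lives at `ci/scripts/min-deps.py`, so the import name here is illustrative):

    from pathlib import Path

    import min_deps  # hypothetical import name for ci/scripts/min-deps.py

    # Space-separated output to stdout, suitable for `pip install $(python min-deps.py pyproject.toml)`.
    min_deps.main(["pyproject.toml", "--extras", "test"])

    # Newline-separated output written to a file, usable as a constraints file.
    min_deps.main(["pyproject.toml", "--extras", "test", "-o", "ci/scanpy-min-deps.txt"])
    print(Path("ci/scanpy-min-deps.txt").read_text())
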
8 changes: 5 additions & 3 deletions ci/scripts/towncrier_automation.py
@@ -1,4 +1,8 @@
#!/usr/bin/env python3
# /// script
# dependencies = [ "towncrier", "packaging" ]
# ///

from __future__ import annotations

import argparse
@@ -62,9 +66,7 @@ def main(argv: Sequence[str] | None = None) -> None:
text=True,
check=True,
).stdout.strip()
pr_description = (
"" if base_branch == "main" else "@meeseeksmachine backport to main"
)
pr_description = "" if base_branch == "main" else "@meeseeksdev backport to main"
branch_name = f"release_notes_{args.version}"

# Create a new branch + commit
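
The `# /// script` header added here is PEP 723 inline script metadata: a runner such as `uv run` reads the `dependencies` list and provisions `towncrier` and `packaging` before executing the script, with no separate requirements file. A minimal standalone sketch of the same mechanism (the file body is illustrative, not taken from the repo):

    #!/usr/bin/env python3
    # /// script
    # dependencies = [ "packaging" ]
    # ///
    """Self-contained example: `uv run example.py` installs `packaging`, then runs this."""

    from packaging.version import Version

    # Works even in an environment where `packaging` is not already installed.
    print(Version("1.10") > Version("1.9"))  # True
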
9 changes: 6 additions & 3 deletions hatch.toml
@@ -19,14 +19,17 @@ features = ["test", "dask-ml"]
extra-dependencies = ["ipykernel"]
overrides.matrix.deps.env-vars = [
{ if = ["pre"], key = "UV_PRERELEASE", value = "allow" },
{ if = ["min"], key = "UV_RESOLUTION", value = "lowest-direct" },
{ if = ["min"], key = "UV_CONSTRAINT", value = "ci/scanpy-min-deps.txt" },
]
overrides.matrix.deps.pre-install-commands = [
{ if = ["min"], value = "uv run ci/scripts/min-deps.py pyproject.toml -o ci/scanpy-min-deps.txt" },
]
overrides.matrix.deps.python = [
{ if = ["min"] , value = "3.10" },
{ if = ["min"], value = "3.10" },
{ if = ["stable", "full", "pre"], value = "3.12" },
]
overrides.matrix.deps.features = [
{ if = ["full"] , value = "test-full" },
{ if = ["full"], value = "test-full" },
]

[[envs.hatch-test.matrix]]
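
For the `min` matrix entry, the pre-install command generates `ci/scanpy-min-deps.txt` and `UV_CONSTRAINT` then pins the resolver to exactly those versions, replacing the previous `UV_RESOLUTION=lowest-direct` setting. The flow is roughly equivalent to the following sketch (the subprocess calls and the `.[test]` extra are illustrative, not taken from the repo):

    import os
    import subprocess

    # 1. Write the minimum supported versions of the direct dependencies to a constraints file.
    subprocess.run(
        ["uv", "run", "ci/scripts/min-deps.py", "pyproject.toml", "-o", "ci/scanpy-min-deps.txt"],
        check=True,
    )

    # 2. Install with the uv resolver constrained to those minimum versions.
    env = {**os.environ, "UV_CONSTRAINT": "ci/scanpy-min-deps.txt"}
    subprocess.run(["uv", "pip", "install", "-e", ".[test]"], check=True, env=env)
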
6 changes: 3 additions & 3 deletions pyproject.toml
@@ -52,7 +52,7 @@ dependencies = [
"pandas >=1.5",
"scipy>=1.8",
"seaborn>=0.13",
"h5py>=3.6",
"h5py>=3.7",
"tqdm",
"scikit-learn>=1.1",
"statsmodels>=0.13",
@@ -166,7 +166,7 @@ addopts = [
"-ptesting.scanpy._pytest",
"--pyargs",
]
testpaths = ["./tests", "scanpy"]
testpaths = ["./tests", "./ci", "scanpy"]
norecursedirs = ["tests/_images"]
xfail_strict = true
nunit_attach_on = "fail"
@@ -211,7 +211,7 @@ exclude_also = [
"if __name__ == .__main__.:",
"if TYPE_CHECKING:",
# https://github.com/numba/numba/issues/4268
"@numba.njit.*",
'@(numba\.|nb\.)njit.*',
]

[tool.ruff]
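
The updated coverage exclusion escapes the dots and also matches the common `nb` alias, so JIT-decorated lines are excluded from coverage whether numba is imported as `numba` or as `nb`. A small sketch of the two spellings the pattern is meant to catch (the functions are placeholders):

    import numba
    import numba as nb


    @numba.njit(cache=True)  # matched by '@(numba\.|nb\.)njit.*'
    def add(a: int, b: int) -> int:
        return a + b


    @nb.njit  # also matched by the same pattern
    def mul(a: int, b: int) -> int:
        return a * b
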
4 changes: 4 additions & 0 deletions src/scanpy/_compat.py
@@ -9,15 +9,19 @@
from pathlib import Path
from typing import TYPE_CHECKING, Literal, ParamSpec, TypeVar, cast, overload

import numpy as np
from packaging.version import Version

if TYPE_CHECKING:
from collections.abc import Callable
from importlib.metadata import PackageMetadata


P = ParamSpec("P")
R = TypeVar("R")

_LegacyRandom = int | np.random.RandomState | None


if TYPE_CHECKING:
# type checkers are confused and can only see …core.Array
10 changes: 6 additions & 4 deletions src/scanpy/_utils/__init__.py
@@ -12,6 +12,7 @@
import re
import sys
import warnings
from collections.abc import Sequence
from contextlib import contextmanager, suppress
from enum import Enum
from functools import partial, reduce, singledispatch, wraps
@@ -56,12 +57,13 @@
from anndata import AnnData
from numpy.typing import ArrayLike, DTypeLike, NDArray

from .._compat import _LegacyRandom
from ..neighbors import NeighborsParams, RPForestDict


# e.g. https://scikit-learn.org/stable/modules/generated/sklearn.decomposition.PCA.html
# maybe in the future random.Generator
AnyRandom = int | np.random.RandomState | None
SeedLike = int | np.integer | Sequence[int] | np.random.SeedSequence
RNGLike = np.random.Generator | np.random.BitGenerator

LegacyUnionType = type(Union[int, str]) # noqa: UP007


@@ -493,7 +495,7 @@ def moving_average(a: np.ndarray, n: int):
return ret[n - 1 :] / n


def get_random_state(seed: AnyRandom) -> np.random.RandomState:
def _get_legacy_random(seed: _LegacyRandom) -> np.random.RandomState:
if isinstance(seed, np.random.RandomState):
return seed
return np.random.RandomState(seed)
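
Together with `_compat.py` above, this splits the old `AnyRandom` alias into a legacy type (`_LegacyRandom`, normalized to `np.random.RandomState` by the renamed `_get_legacy_random`) and forward-looking `SeedLike`/`RNGLike` aliases for `numpy.random.Generator`-based code. A minimal sketch of how the two styles would be consumed (the function names are illustrative, not scanpy API):

    from collections.abc import Sequence

    import numpy as np

    _LegacyRandom = int | np.random.RandomState | None
    SeedLike = int | np.integer | Sequence[int] | np.random.SeedSequence
    RNGLike = np.random.Generator | np.random.BitGenerator


    def legacy_draw(random_state: _LegacyRandom = 0) -> float:
        # Legacy path: normalize to RandomState, as _get_legacy_random does.
        if not isinstance(random_state, np.random.RandomState):
            random_state = np.random.RandomState(random_state)
        return random_state.uniform()


    def modern_draw(rng: RNGLike | SeedLike | None = None) -> float:
        # Modern path: default_rng accepts seeds, SeedSequences, BitGenerators, and Generators.
        return np.random.default_rng(rng).uniform()
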
4 changes: 2 additions & 2 deletions src/scanpy/datasets/_datasets.py
@@ -18,7 +18,7 @@
if TYPE_CHECKING:
from typing import Literal

from .._utils import AnyRandom
from .._compat import _LegacyRandom

VisiumSampleID = Literal[
"V1_Breast_Cancer_Block_A_Section_1",
@@ -63,7 +63,7 @@ def blobs(
n_centers: int = 5,
cluster_std: float = 1.0,
n_observations: int = 640,
random_state: AnyRandom = 0,
random_state: _LegacyRandom = 0,
) -> AnnData:
"""\
Gaussian Blobs.
4 changes: 2 additions & 2 deletions src/scanpy/external/pp/_dca.py
@@ -11,7 +11,7 @@

from anndata import AnnData

from ..._utils import AnyRandom
from ..._compat import _LegacyRandom

_AEType = Literal["zinb-conddisp", "zinb", "nb-conddisp", "nb"]

@@ -62,7 +62,7 @@ def dca(
early_stop: int = 15,
batch_size: int = 32,
optimizer: str = "RMSprop",
random_state: AnyRandom = 0,
random_state: _LegacyRandom = 0,
threads: int | None = None,
learning_rate: float | None = None,
verbose: bool = False,
4 changes: 2 additions & 2 deletions src/scanpy/external/pp/_magic.py
@@ -19,7 +19,7 @@

from anndata import AnnData

from ..._utils import AnyRandom
from ..._compat import _LegacyRandom

MIN_VERSION = "2.0"

@@ -36,7 +36,7 @@ def magic(
n_pca: int | None = 100,
solver: Literal["exact", "approximate"] = "exact",
knn_dist: str = "euclidean",
random_state: AnyRandom = None,
random_state: _LegacyRandom = None,
n_jobs: int | None = None,
verbose: bool = False,
copy: bool | None = None,
4 changes: 2 additions & 2 deletions src/scanpy/external/tl/_phate.py
@@ -16,7 +16,7 @@

from anndata import AnnData

from ..._utils import AnyRandom
from ..._compat import _LegacyRandom


@old_positionals(
@@ -49,7 +49,7 @@ def phate(
mds_dist: str = "euclidean",
mds: Literal["classic", "metric", "nonmetric"] = "metric",
n_jobs: int | None = None,
random_state: AnyRandom = None,
random_state: _LegacyRandom = None,
verbose: bool | int | None = None,
copy: bool = False,
**kwargs,
12 changes: 6 additions & 6 deletions src/scanpy/neighbors/__init__.py
@@ -33,7 +33,7 @@
from igraph import Graph
from scipy.sparse import csr_matrix

from .._utils import AnyRandom
from .._compat import _LegacyRandom
from ._types import KnnTransformerLike, _Metric, _MetricFn


@@ -54,13 +54,13 @@ class KwdsForTransformer(TypedDict):
n_neighbors: int
metric: _Metric | _MetricFn
metric_params: Mapping[str, Any]
random_state: AnyRandom
random_state: _LegacyRandom


class NeighborsParams(TypedDict):
n_neighbors: int
method: _Method
random_state: AnyRandom
random_state: _LegacyRandom
metric: _Metric | _MetricFn
metric_kwds: NotRequired[Mapping[str, Any]]
use_rep: NotRequired[str]
@@ -79,7 +79,7 @@ def neighbors(
transformer: KnnTransformerLike | _KnownTransformer | None = None,
metric: _Metric | _MetricFn = "euclidean",
metric_kwds: Mapping[str, Any] = MappingProxyType({}),
random_state: AnyRandom = 0,
random_state: _LegacyRandom = 0,
key_added: str | None = None,
copy: bool = False,
) -> AnnData | None:
@@ -521,7 +521,7 @@ def compute_neighbors(
transformer: KnnTransformerLike | _KnownTransformer | None = None,
metric: _Metric | _MetricFn = "euclidean",
metric_kwds: Mapping[str, Any] = MappingProxyType({}),
random_state: AnyRandom = 0,
random_state: _LegacyRandom = 0,
) -> None:
"""\
Compute distances and connectivities of neighbors.
@@ -757,7 +757,7 @@ def compute_eigen(
n_comps: int = 15,
sym: bool | None = None,
sort: Literal["decrease", "increase"] = "decrease",
random_state: AnyRandom = 0,
random_state: _LegacyRandom = 0,
):
"""\
Compute eigen decomposition of transition matrix.
3 changes: 2 additions & 1 deletion src/scanpy/plotting/_tools/paga.py
@@ -33,6 +33,7 @@
from matplotlib.colors import Colormap
from scipy.sparse import spmatrix

from ..._compat import _LegacyRandom
from ...tools._draw_graph import _Layout as _LayoutWithoutEqTree
from .._utils import _FontSize, _FontWeight, _LegendLoc

@@ -210,7 +211,7 @@ def _compute_pos(
adjacency_solid: spmatrix | np.ndarray,
*,
layout: _Layout | None = None,
random_state: _sc_utils.AnyRandom = 0,
random_state: _LegacyRandom = 0,
init_pos: np.ndarray | None = None,
adj_tree=None,
root: int = 0,
5 changes: 3 additions & 2 deletions src/scanpy/preprocessing/_pca/__init__.py
@@ -30,7 +30,8 @@
from scipy import sparse
from scipy.sparse import spmatrix

from ..._utils import AnyRandom, Empty
from ..._compat import _LegacyRandom
from ..._utils import Empty

CSMatrix = sparse.csr_matrix | sparse.csc_matrix

@@ -70,7 +71,7 @@ def pca(
layer: str | None = None,
zero_center: bool | None = True,
svd_solver: SvdSolver | None = None,
random_state: AnyRandom = 0,
random_state: _LegacyRandom = 0,
return_info: bool = False,
mask_var: NDArray[np.bool_] | str | None | Empty = _empty,
use_highly_variable: bool | None = None,
4 changes: 2 additions & 2 deletions src/scanpy/preprocessing/_pca/_compat.py
@@ -18,7 +18,7 @@
from scipy import sparse
from sklearn.decomposition import PCA

from .._utils import AnyRandom
from ..._compat import _LegacyRandom

CSMatrix = sparse.csr_matrix | sparse.csc_matrix

@@ -29,7 +29,7 @@ def _pca_compat_sparse(
*,
solver: Literal["arpack", "lobpcg"],
mu: NDArray[np.floating] | None = None,
random_state: AnyRandom = None,
random_state: _LegacyRandom = None,
) -> tuple[NDArray[np.floating], PCA]:
"""Sparse PCA for scikit-learn <1.4"""
random_state = check_random_state(random_state)
(remaining changed files not shown)

0 comments on commit 99aa9da
