From 874afca9615e1a78baee23755a52f29c8c364934 Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Wed, 6 Mar 2024 15:07:40 -0800 Subject: [PATCH 01/55] Add support for processing based on dialects. - Prepping for Alembic usage --- .../BL_Python/database/migrations/__init__.py | 115 ------------------ .../BL_Python/database/schema/__init__.py | 16 +++ .../BL_Python/database/schema/dialect.py | 67 ++++++++++ .../BL_Python/database/schema/postgresql.py | 24 ++++ .../BL_Python/database/schema/sqlite.py | 24 ++++ src/database/BL_Python/database/types.py | 25 +++- 6 files changed, 155 insertions(+), 116 deletions(-) create mode 100644 src/database/BL_Python/database/schema/__init__.py create mode 100644 src/database/BL_Python/database/schema/dialect.py create mode 100644 src/database/BL_Python/database/schema/postgresql.py create mode 100644 src/database/BL_Python/database/schema/sqlite.py diff --git a/src/database/BL_Python/database/migrations/__init__.py b/src/database/BL_Python/database/migrations/__init__.py index ce824a72..e69de29b 100644 --- a/src/database/BL_Python/database/migrations/__init__.py +++ b/src/database/BL_Python/database/migrations/__init__.py @@ -1,115 +0,0 @@ -from typing import TYPE_CHECKING, List, Optional, Protocol, cast, final - -from sqlalchemy.orm import DeclarativeMeta - -MetaBaseType = Type[DeclarativeMeta] - -if TYPE_CHECKING: - from typing import Dict, Protocol, Type, TypeVar, Union - - from sqlalchemy.engine import Dialect - - TBase = TypeVar("TBase") - - class TableNameCallback(Protocol): - def __call__( - self, - dialect_schema: "Union[str, None]", - full_table_name: str, - base_table: str, - meta_base: MetaBaseType, - ) -> None: ... - - class Connection(Protocol): - dialect: Dialect - - class Op(Protocol): - @staticmethod - def get_bind() -> Connection: ... - - -@final -class DialectHelper: - """ - Utilities to get database schema and table names - for different SQL dialects and database engines. - - For example, PostgreSQL supports schemas. This means: - * get_dialect_schema(meta) returns a schema name, if there is one, e.g. "cap" - * get_full_table_name(table_name, meta) returns the schema name, followed by the table name, e.g. " cap.assay_plate " - - SQLite does not support schemas. This means: - * get_dialect_schema(meta) returns None - * get_full_table_name(table_name, meta) returns the table name, with the schema name prepended to it, e.g. " 'cap.assay_plate' " - The key difference is that there is no schema, and the table name comes from the SQLite - engine instantiation, which prepends the "schema" to the table name. - """ - - dialect: "Dialect" - dialect_supports_schemas: bool - - def __init__(self, dialect: "Dialect"): - self.dialect = dialect - # right now we only care about SQLite and PSQL, - # so if the dialect is PSQL, then we consider the - # dialect to support schemas, otherwise it does not. 
- self.dialect_supports_schemas = dialect.name == "postgresql" - - @staticmethod - def get_schema(meta: "MetaBaseType"): - table_args = cast( - Optional[dict[str, str]], getattr(meta, "__table_args__", None) - ) - if table_args is None: - return None - return table_args.get("schema") - - def get_dialect_schema(self, meta: "MetaBaseType"): - """Get the database schema as a string, or None if the dialect does not support schemas.""" - if not self.dialect_supports_schemas: - return None - return DialectHelper.get_schema(meta) - - def get_full_table_name(self, table_name: str, meta: "MetaBaseType"): - """ - If the dialect supports schemas, then the table name does not have the schema prepended. - In dialects that don't support schemas, e.g., SQLite, the table name has the schema prepended. - This is because, when schemas are supported, the dialect automatically handles which schema - to use, while non-schema dialects do not reference any schemas. - """ - if self.get_dialect_schema(meta): - return table_name - else: - return f"{DialectHelper.get_schema(meta)}.{table_name}" - - def get_timestamp_sql(self): - timestamp_default_sql = "now()" - if self.dialect.name == "sqlite": - timestamp_default_sql = "CURRENT_TIMESTAMP" - return timestamp_default_sql - - @staticmethod - def iterate_table_names( - op: "Op", - schema_tables: "Dict[MetaBaseType, List[str]]", - table_name_callback: "TableNameCallback", - ): - """ - Call `table_name_callback` once for every table in every Base. - - op: The `op` object from Alembic. - schema_tables: A dictionary of the tables this call applies to for every Base. - table_name_callback: A callback executed for every table in `schema_tables`. - """ - dialect: Dialect = op.get_bind().dialect - schema = DialectHelper(dialect) - get_full_table_name = schema.get_full_table_name - get_dialect_schema = schema.get_dialect_schema - - for meta_base, schema_base_tables in schema_tables.items(): - dialect_schema = get_dialect_schema(meta_base) - for base_table in schema_base_tables: - full_table_name = get_full_table_name(base_table, meta_base) - table_name_callback( - dialect_schema, full_table_name, base_table, meta_base - ) diff --git a/src/database/BL_Python/database/schema/__init__.py b/src/database/BL_Python/database/schema/__init__.py new file mode 100644 index 00000000..b027d659 --- /dev/null +++ b/src/database/BL_Python/database/schema/__init__.py @@ -0,0 +1,16 @@ +from BL_Python.database.types import Op +from sqlalchemy.engine import Dialect + +from .postgresql import PostgreSQLDialect +from .sqlite import SQLiteDialect + +_dialect_type_map = {"sqlite": SQLiteDialect, "postgresql": PostgreSQLDialect} + + +def get_type_from_dialect(dialect: Dialect): + return _dialect_type_map[dialect.name](dialect) + + +def get_type_from_op(op: Op): + dialect: Dialect = op.get_bind().dialect + return get_type_from_dialect(dialect) diff --git a/src/database/BL_Python/database/schema/dialect.py b/src/database/BL_Python/database/schema/dialect.py new file mode 100644 index 00000000..2bbbf0d3 --- /dev/null +++ b/src/database/BL_Python/database/schema/dialect.py @@ -0,0 +1,67 @@ +from abc import ABC, abstractmethod + +from BL_Python.database.types import MetaBase, TableNameCallback +from sqlalchemy.engine import Dialect + + +class DialectBase(ABC): + supports_schemas: bool = False + + @staticmethod + def get_schema(meta: MetaBase): + table_args = meta.__table_args__ + + if isinstance(table_args, dict): + return table_args.get("schema") + + return None + + @staticmethod + def 
iterate_table_names( + dialect: "DialectBase", + schema_tables: dict[MetaBase, list[str]], + table_name_callback: TableNameCallback, + ): + """ + Call `table_name_callback` once for every table in every Base. + + op: The `op` object from Alembic. + schema_tables: A dictionary of the tables this call applies to for every Base. + table_name_callback: A callback executed for every table in `schema_tables`. + """ + get_full_table_name = dialect.get_full_table_name + get_dialect_schema = dialect.get_dialect_schema + + for meta_base, schema_base_tables in schema_tables.items(): + dialect_schema = get_dialect_schema(meta_base) + for base_table in schema_base_tables: + full_table_name = get_full_table_name(base_table, meta_base) + table_name_callback( + dialect_schema, full_table_name, base_table, meta_base + ) + + def get_dialect_schema(self, meta: MetaBase): + if self.supports_schemas: + return DialectBase.get_schema(meta) + + return None + + def get_full_table_name(self, table_name: str, meta: MetaBase): + """ + If the dialect supports schemas, then the table name does not have the schema prepended. + In dialects that don't support schemas, e.g., SQLite, the table name has the schema prepended. + This is because, when schemas are supported, the dialect automatically handles which schema + to use, while non-schema dialects do not reference any schemas. + """ + if self.get_dialect_schema(meta): + return table_name + else: + return f"{DialectBase.get_schema(meta)}.{table_name}" + + @property + @abstractmethod + def dialect(self) -> Dialect: ... + + @property + @abstractmethod + def get_timestamp_sql(self) -> str: ... diff --git a/src/database/BL_Python/database/schema/postgresql.py b/src/database/BL_Python/database/schema/postgresql.py new file mode 100644 index 00000000..8cd473ef --- /dev/null +++ b/src/database/BL_Python/database/schema/postgresql.py @@ -0,0 +1,24 @@ +from typing import final + +from BL_Python.database.schema.dialect import DialectBase +from sqlalchemy.engine import Dialect +from typing_extensions import override + + +@final +class PostgreSQLDialect(DialectBase): + _dialect: Dialect + supports_schemas: bool = True + + def __init__(self, dialect: Dialect) -> None: + self._dialect = dialect + + @property + @override + def dialect(self) -> Dialect: + return self._dialect + + @property + @override + def get_timestamp_sql(self): + return "now()" diff --git a/src/database/BL_Python/database/schema/sqlite.py b/src/database/BL_Python/database/schema/sqlite.py new file mode 100644 index 00000000..547e2fb2 --- /dev/null +++ b/src/database/BL_Python/database/schema/sqlite.py @@ -0,0 +1,24 @@ +from typing import final + +from BL_Python.database.schema.dialect import DialectBase +from sqlalchemy.engine import Dialect +from typing_extensions import override + + +@final +class SQLiteDialect(DialectBase): + _dialect: Dialect + supports_schemas: bool = False + + def __init__(self, dialect: Dialect) -> None: + self._dialect = dialect + + @property + @override + def dialect(self) -> Dialect: + return self._dialect + + @property + @override + def get_timestamp_sql(self): + return "CURRENT_TIMESTAMP" diff --git a/src/database/BL_Python/database/types.py b/src/database/BL_Python/database/types.py index fbb5d70c..9b1887aa 100644 --- a/src/database/BL_Python/database/types.py +++ b/src/database/BL_Python/database/types.py @@ -1,6 +1,10 @@ -from typing import Protocol, TypedDict +from typing import Protocol, TypedDict, TypeVar from sqlalchemy import Constraint, MetaData +from sqlalchemy.engine import 
Dialect + +TBase = TypeVar("TBase") + TableArgsDict = TypedDict("TableArgsDict", {"schema": str | None}) @@ -9,3 +13,22 @@ class MetaBase(Protocol): metadata: MetaData __tablename__: str __table_args__: tuple[Constraint | TableArgsDict, ...] | TableArgsDict + + +class TableNameCallback(Protocol): + def __call__( + self, + dialect_schema: str | None, + full_table_name: str, + base_table: str, + meta_base: MetaBase, + ) -> None: ... + + +class Connection(Protocol): + dialect: Dialect + + +class Op(Protocol): + @staticmethod + def get_bind() -> Connection: ... From 793b224ad0846d81aceab613155cefcf2b07be0e Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Wed, 6 Mar 2024 15:55:22 -0800 Subject: [PATCH 02/55] Add tests for getting schemas. --- .../BL_Python/database/schema/__init__.py | 5 ++ src/database/test/unit/test_schema.py | 59 +++++++++++++++++++ 2 files changed, 64 insertions(+) create mode 100644 src/database/test/unit/test_schema.py diff --git a/src/database/BL_Python/database/schema/__init__.py b/src/database/BL_Python/database/schema/__init__.py index b027d659..3ede3bb6 100644 --- a/src/database/BL_Python/database/schema/__init__.py +++ b/src/database/BL_Python/database/schema/__init__.py @@ -8,6 +8,11 @@ def get_type_from_dialect(dialect: Dialect): + if not _dialect_type_map.get(dialect.name): + raise ValueError( + f"Unexpected dialect with name `{dialect.name}`. Expected one of {list(_dialect_type_map.keys())}." + ) + return _dialect_type_map[dialect.name](dialect) diff --git a/src/database/test/unit/test_schema.py b/src/database/test/unit/test_schema.py new file mode 100644 index 00000000..225f19e4 --- /dev/null +++ b/src/database/test/unit/test_schema.py @@ -0,0 +1,59 @@ +import pytest +from alembic.operations.base import Operations +from BL_Python.database.schema import get_type_from_dialect, get_type_from_op +from BL_Python.database.schema.postgresql import PostgreSQLDialect +from BL_Python.database.schema.sqlite import SQLiteDialect +from mock import MagicMock +from sqlalchemy.engine import Dialect + + +@pytest.mark.parametrize( + "dialect_name,expected_type", + [("sqlite", SQLiteDialect), ("postgresql", PostgreSQLDialect)], +) +def test__get_type_from_dialect__returns_correct_dialect_instance( + dialect_name: str, expected_type: type[SQLiteDialect] | type[PostgreSQLDialect] +): + dialect = Dialect() + dialect.name = dialect_name + dialect_type = get_type_from_dialect(dialect) + assert isinstance(dialect_type, expected_type) + + +def test__get_type_from_dialect__raises_exception_when_given_unknown_dialect(): + dialect_name = "mssql" + dialect = Dialect() + dialect.name = dialect_name + + with pytest.raises( + ValueError, match=rf"Unexpected dialect with name `{dialect_name}`.+" + ): + _ = get_type_from_dialect(dialect) + + +@pytest.mark.parametrize( + "dialect_name,expected_type", + [("sqlite", SQLiteDialect), ("postgresql", PostgreSQLDialect)], +) +def test__get_type_from_op__returns_correct_dialect_instance( + dialect_name: str, expected_type: type[SQLiteDialect] | type[PostgreSQLDialect] +): + dialect = Dialect() + dialect.name = dialect_name + migration_context = MagicMock(impl=MagicMock(bind=MagicMock(dialect=dialect))) + op = Operations(migration_context) + dialect_type = get_type_from_op(op) + assert isinstance(dialect_type, expected_type) + + +def test__get_type_from_op__raises_exception_when_given_unknown_dialect(): + dialect_name = "mssql" + dialect = Dialect() + dialect.name = dialect_name + migration_context = 
MagicMock(impl=MagicMock(bind=MagicMock(dialect=dialect))) + op = Operations(migration_context) + + with pytest.raises( + ValueError, match=rf"Unexpected dialect with name `{dialect_name}`.+" + ): + _ = get_type_from_op(op) From c63a964db6d59ced209eac0ce5750eb320ab0290 Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Thu, 7 Mar 2024 11:42:56 -0800 Subject: [PATCH 03/55] Add tests for dialect and schema methods to be used by Alembic. --- .../BL_Python/database/schema/dialect.py | 6 +- .../BL_Python/database/schema/postgresql.py | 7 +- .../BL_Python/database/schema/sqlite.py | 8 +- src/database/test/unit/schema/test_dialect.py | 169 ++++++++++++++++++ src/database/test/unit/test_engine.py | 11 ++ 5 files changed, 196 insertions(+), 5 deletions(-) create mode 100644 src/database/test/unit/schema/test_dialect.py diff --git a/src/database/BL_Python/database/schema/dialect.py b/src/database/BL_Python/database/schema/dialect.py index 2bbbf0d3..d30554bd 100644 --- a/src/database/BL_Python/database/schema/dialect.py +++ b/src/database/BL_Python/database/schema/dialect.py @@ -9,7 +9,7 @@ class DialectBase(ABC): @staticmethod def get_schema(meta: MetaBase): - table_args = meta.__table_args__ + table_args = hasattr(meta, "__table_args__") and meta.__table_args__ or None if isinstance(table_args, dict): return table_args.get("schema") @@ -60,8 +60,8 @@ def get_full_table_name(self, table_name: str, meta: MetaBase): @property @abstractmethod - def dialect(self) -> Dialect: ... + def dialect(self) -> Dialect: ... # pragma: nocover @property @abstractmethod - def get_timestamp_sql(self) -> str: ... + def timestamp_sql(self) -> str: ... # pragma: nocover diff --git a/src/database/BL_Python/database/schema/postgresql.py b/src/database/BL_Python/database/schema/postgresql.py index 8cd473ef..5b2a8ff6 100644 --- a/src/database/BL_Python/database/schema/postgresql.py +++ b/src/database/BL_Python/database/schema/postgresql.py @@ -7,10 +7,15 @@ @final class PostgreSQLDialect(DialectBase): + DIALECT_NAME = "postgresql" _dialect: Dialect supports_schemas: bool = True def __init__(self, dialect: Dialect) -> None: + if dialect.name != PostgreSQLDialect.DIALECT_NAME: + raise ValueError( + f"Invalid Dialect with name `{dialect.name}` provided for `{PostgreSQLDialect.__name__}`. Expected `sqlite`." + ) self._dialect = dialect @property @@ -20,5 +25,5 @@ def dialect(self) -> Dialect: @property @override - def get_timestamp_sql(self): + def timestamp_sql(self): return "now()" diff --git a/src/database/BL_Python/database/schema/sqlite.py b/src/database/BL_Python/database/schema/sqlite.py index 547e2fb2..6885e52c 100644 --- a/src/database/BL_Python/database/schema/sqlite.py +++ b/src/database/BL_Python/database/schema/sqlite.py @@ -7,10 +7,16 @@ @final class SQLiteDialect(DialectBase): + DIALECT_NAME = "sqlite" _dialect: Dialect supports_schemas: bool = False def __init__(self, dialect: Dialect) -> None: + if dialect.name != SQLiteDialect.DIALECT_NAME: + raise ValueError( + f"Invalid Dialect with name `{dialect.name}` provided for `{SQLiteDialect.__name__}`. Expected `sqlite`." 
+ ) + self._dialect = dialect @property @@ -20,5 +26,5 @@ def dialect(self) -> Dialect: @property @override - def get_timestamp_sql(self): + def timestamp_sql(self): return "CURRENT_TIMESTAMP" diff --git a/src/database/test/unit/schema/test_dialect.py b/src/database/test/unit/schema/test_dialect.py new file mode 100644 index 00000000..f8a85b9b --- /dev/null +++ b/src/database/test/unit/schema/test_dialect.py @@ -0,0 +1,169 @@ +from typing import ClassVar + +import pytest +from BL_Python.database.schema.dialect import DialectBase +from BL_Python.database.schema.postgresql import PostgreSQLDialect +from BL_Python.database.schema.sqlite import SQLiteDialect +from BL_Python.database.types import MetaBase +from BL_Python.programming.str import get_random_str +from mock import MagicMock +from sqlalchemy import Column, Integer +from sqlalchemy.engine import Dialect +from sqlalchemy.ext.declarative import declarative_base + + +def get_test_table(schema_name: str | None = "foo_schema"): + if schema_name is None: + + class _Base: ... # pyright: ignore[reportRedeclaration] + else: + + class _Base: + __table_args__ = {"schema": schema_name} + + Base = declarative_base(cls=_Base) + + class Foo(Base): # pyright: ignore[reportUntypedBaseClass] + __tablename__: ClassVar = "foo" + foo_id = Column("foo_id", Integer, primary_key=True) + + class Bar(Base): # pyright: ignore[reportUntypedBaseClass] + __tablename__: ClassVar = "bar" + bar_id = Column("bar_id", Integer, primary_key=True) + + return (schema_name, [Foo, Bar], _Base) + + +@pytest.fixture +def test_table(schema_name: str | None = "foo_schema"): + return get_test_table(schema_name) + + +@pytest.mark.parametrize("schema_name", ["foo_schema", None]) +def test__DialectBase__get_schema__returns_correct_table_schema_name( + schema_name: str | None, +): + (table_schema_name, tables, _) = get_test_table(schema_name) + + schema = DialectBase.get_schema(tables[0]) + + assert schema == table_schema_name + + +@pytest.mark.parametrize("dialect_type", [SQLiteDialect, PostgreSQLDialect]) +def test__DialectBase__init__raises_error_when_wrong_dialect_used( + dialect_type: type[SQLiteDialect] | type[PostgreSQLDialect], +): + dialect = Dialect() + dialect.name = get_random_str() + with pytest.raises( + ValueError, match=rf"Invalid Dialect with name `{dialect.name}`.+" + ): + _ = dialect_type(dialect) + + +@pytest.mark.parametrize( + "dialect_type,expected_schema_name", + [(SQLiteDialect, None), (PostgreSQLDialect, "foo_schema")], +) +def test__DialectBase__get_dialect_schema__returns_expected_schema_name( + dialect_type: type[SQLiteDialect] | type[PostgreSQLDialect], + expected_schema_name: str, + test_table: tuple[str, list[MetaBase], MetaBase], +): + (_, tables, _) = test_table + dialect = Dialect() + dialect.name = dialect_type.DIALECT_NAME + test_dialect = dialect_type(dialect) + + schema = test_dialect.get_dialect_schema(tables[0]) + + assert schema == expected_schema_name + + +@pytest.mark.parametrize( + "dialect_type,expected_table_name", + [(SQLiteDialect, "foo_schema.foo"), (PostgreSQLDialect, "foo")], +) +def test__DialectBase__get_full_table_name__returns_expected_table_name( + dialect_type: type[SQLiteDialect] | type[PostgreSQLDialect], + expected_table_name: str, + test_table: tuple[str, list[MetaBase], MetaBase], +): + (_, tables, _) = test_table + dialect = Dialect() + dialect.name = dialect_type.DIALECT_NAME + test_dialect = dialect_type(dialect) + + table_name = test_dialect.get_full_table_name("foo", tables[0]) + + assert table_name == 
expected_table_name + + +@pytest.mark.parametrize( + "dialect_type", + [SQLiteDialect, PostgreSQLDialect], +) +def test__DialectBase__iterate_table_names__calls_callback_for_every_table_in_metabase( + dialect_type: type[SQLiteDialect] | type[PostgreSQLDialect], + test_table: tuple[str, list[MetaBase], MetaBase], +): + (schema_name, tables, meta_base) = test_table + dialect = Dialect() + dialect.name = dialect_type.DIALECT_NAME + test_dialect = dialect_type(dialect) + schema_tables = {meta_base: [table.__tablename__ for table in tables]} + table_name_callback = MagicMock() + + test_dialect.iterate_table_names(test_dialect, schema_tables, table_name_callback) + + if test_dialect.supports_schemas: + table_name_callback.assert_any_call( + schema_name, tables[0].__tablename__, tables[0].__tablename__, meta_base + ) + table_name_callback.assert_any_call( + schema_name, tables[1].__tablename__, tables[1].__tablename__, meta_base + ) + else: + table_name_callback.assert_any_call( + None, + f"{schema_name}.{tables[0].__tablename__}", + tables[0].__tablename__, + meta_base, + ) + table_name_callback.assert_any_call( + None, + f"{schema_name}.{tables[1].__tablename__}", + tables[1].__tablename__, + meta_base, + ) + + +@pytest.mark.parametrize( + "dialect_name,dialect_type", + [("sqlite", SQLiteDialect), ("postgresql", PostgreSQLDialect)], +) +def test__get_type_from_dialect__dialect_type_uses_correct_sqlalchemy_dialect( + dialect_name: str, dialect_type: type[SQLiteDialect] | type[PostgreSQLDialect] +): + dialect = Dialect() + dialect.name = dialect_type.DIALECT_NAME + test_dialect = dialect_type(dialect) + assert test_dialect.dialect.name == dialect_name + + +@pytest.mark.parametrize( + "dialect_type,expected_sql", + [ + (SQLiteDialect, "CURRENT_TIMESTAMP"), + (PostgreSQLDialect, "now()"), + ], +) +def test__get_type_from_dialect__dialect_type_uses_correct_timestamp_sql( + dialect_type: type[SQLiteDialect] | type[PostgreSQLDialect], + expected_sql: str, +): + dialect = Dialect() + dialect.name = dialect_type.DIALECT_NAME + test_dialect = dialect_type(dialect) + assert test_dialect.timestamp_sql == expected_sql diff --git a/src/database/test/unit/test_engine.py b/src/database/test/unit/test_engine.py index afa6acbd..c22fcb68 100644 --- a/src/database/test/unit/test_engine.py +++ b/src/database/test/unit/test_engine.py @@ -129,6 +129,17 @@ def test__PostgreSQLScopedSession__create__uses_correct_connection_pool_type( assert isinstance(session.bind.pool, connection_pool_type) # type: ignore[reportUnknownMemberType,reportAttributeAccessIssue,reportOptionalMemberAccess] +def test__PostgreSQLScopedSession__create__verifies_dependencies_installed( + mocker: MockerFixture, +): + _ = mocker.patch( + "BL_Python.database.engine.postgresql.find_spec", return_value=None + ) + + with pytest.raises(ModuleNotFoundError): + _ = PostgreSQLScopedSession.create(POSTGRESQL_TEST_CONNECTION_STR) + + @patch.object(MetaData, "reflect", MagicMock()) @pytest.mark.parametrize( "session_type,connection_string", From cfff139a866200c15709682a2f70dcbbac36b7ef Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Thu, 7 Mar 2024 15:42:19 -0800 Subject: [PATCH 04/55] Initial supporting code for Alembic to run. 
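A rough usage sketch (the `myapp.models` import and the config path are illustrative assumptions, not part of this change) of how a consuming project's Alembic env.py might hand its declarative bases to the new entry point:

    from pathlib import Path

    from BL_Python.database.migrations.alembic.env import run_migrations

    # Stand-in for the project's own SQLAlchemy declarative base; anything
    # matching the MetaBase protocol used by this package is acceptable here.
    from myapp.models import Base

    # Loads config.toml (or the path given), builds the Injector container,
    # and dispatches to offline or online migration mode as appropriate.
    run_migrations([Base], Path("config.toml"))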
--- src/database/BL_Python/database/config.py | 4 + .../database/migrations/alembic/README | 0 .../database/migrations/alembic/__init__.py | 0 .../database/migrations/alembic/alembic.ini | 100 ++++++++ .../database/migrations/alembic/env.py | 27 ++ .../database/migrations/alembic/env_setup.py | 232 ++++++++++++++++++ .../migrations/alembic/script.py.mako | 24 ++ src/database/BL_Python/database/types.py | 4 +- src/database/MANIFEST.in | 3 +- .../BL_Python/programming/config/__init__.py | 3 +- 10 files changed, 393 insertions(+), 4 deletions(-) create mode 100644 src/database/BL_Python/database/migrations/alembic/README create mode 100644 src/database/BL_Python/database/migrations/alembic/__init__.py create mode 100644 src/database/BL_Python/database/migrations/alembic/alembic.ini create mode 100644 src/database/BL_Python/database/migrations/alembic/env.py create mode 100644 src/database/BL_Python/database/migrations/alembic/env_setup.py create mode 100644 src/database/BL_Python/database/migrations/alembic/script.py.mako diff --git a/src/database/BL_Python/database/config.py b/src/database/BL_Python/database/config.py index c2de07b2..d03cbc3d 100644 --- a/src/database/BL_Python/database/config.py +++ b/src/database/BL_Python/database/config.py @@ -11,3 +11,7 @@ class DatabaseConfig(BaseModel, AbstractConfig): connection_string: str = "sqlite:///:memory:" sqlalchemy_echo: bool = False connect_args: DatabaseConnectArgsConfig | None = None + + +class Config(BaseModel, AbstractConfig): + database: DatabaseConfig diff --git a/src/database/BL_Python/database/migrations/alembic/README b/src/database/BL_Python/database/migrations/alembic/README new file mode 100644 index 00000000..e69de29b diff --git a/src/database/BL_Python/database/migrations/alembic/__init__.py b/src/database/BL_Python/database/migrations/alembic/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/database/BL_Python/database/migrations/alembic/alembic.ini b/src/database/BL_Python/database/migrations/alembic/alembic.ini new file mode 100644 index 00000000..11ce0b21 --- /dev/null +++ b/src/database/BL_Python/database/migrations/alembic/alembic.ini @@ -0,0 +1,100 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts +script_location = migrations + +# template used to generate migration files +# file_template = %%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the python-dateutil library that can be +# installed by adding `alembic[tz]` to the pip requirements +# string value is passed to dateutil.tz.gettz() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the +# "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to migrations/versions. When using multiple version +# directories, initial revisions must be specified with --version-path. 
+# The path separator used here should be the separator specified by "version_path_separator" +# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions + +# version path separator; As mentioned above, this is the character used to split +# version_locations. Valid values are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +version_path_separator = os # default: use os.pathsep + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +sqlalchemy.url = driver://user:pass@localhost/dbname + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S \ No newline at end of file diff --git a/src/database/BL_Python/database/migrations/alembic/env.py b/src/database/BL_Python/database/migrations/alembic/env.py new file mode 100644 index 00000000..d0d89399 --- /dev/null +++ b/src/database/BL_Python/database/migrations/alembic/env.py @@ -0,0 +1,27 @@ +from pathlib import Path + +from alembic import context +from BL_Python.database.config import Config, DatabaseConfig +from BL_Python.database.migrations.alembic.env_setup import AlembicEnvSetup +from BL_Python.database.types import MetaBase +from BL_Python.programming.config import ConfigBuilder, load_config +from BL_Python.programming.dependency_injection import ConfigModule +from injector import Injector + + +def run_migrations(bases: list[MetaBase], config_filename: Path | None = None): + if config_filename is None: + config_filename = Path("config.toml") + + config_type = ConfigBuilder[Config]().with_root_config(Config).build() + config = load_config(config_type, config_filename) + config_module = ConfigModule(config, DatabaseConfig) + + ioc_container = Injector(config_module) + + alembic_env = ioc_container.create_object(AlembicEnvSetup) + + if context.is_offline_mode(): + alembic_env.run_migrations_offline(bases) + else: + alembic_env.run_migrations_online(bases) diff --git a/src/database/BL_Python/database/migrations/alembic/env_setup.py b/src/database/BL_Python/database/migrations/alembic/env_setup.py new file mode 100644 index 00000000..f0a5d3f7 --- /dev/null +++ b/src/database/BL_Python/database/migrations/alembic/env_setup.py @@ -0,0 +1,232 @@ +# import logging +# from configparser import ConfigParser +from dataclasses import dataclass +from functools import lru_cache +from logging.config import fileConfig +from typing import Any, List, Protocol, cast, final + +from alembic import context +from BL_Python.database.config import DatabaseConfig +from BL_Python.database.schema.postgresql import PostgreSQLDialect +from BL_Python.database.schema.sqlite import 
SQLiteDialect +from BL_Python.database.types import MetaBase +from injector import inject + +# TODO only do this when using PostgreSQL, +# and detect if the module is installed +# so we can show a helpful error message +from psycopg2.errors import UndefinedTable +from sqlalchemy import MetaData, Table, engine_from_config, pool +from sqlalchemy.engine import Connectable, Connection, Engine +from sqlalchemy.exc import ProgrammingError + +# from AWS import load_ssm_application_parameters + + +class type_include_object(Protocol): + def __call__( + self, object: Table, name: str, type_: str, reflected: Any, compare_to: Any + ) -> bool: ... + + +class type_include_schemas(Protocol): + def __call__(self, names: List[str]) -> type_include_object: ... + + +@dataclass +class type_metadata: + include_schemas: type_include_schemas + target_metadata: List[MetaData] + schemas: List[str] + + +@final +class AlembicEnvSetup: + _config: DatabaseConfig + + @inject + def __init__(self, config: DatabaseConfig) -> None: + self._config = config + + @lru_cache(maxsize=1) + def get_config(self): + # aws_ssm_config = ConfigParser() + # loaded_config_files = aws_ssm_config.read("aws-ssm.ini") + # if loaded_config_files: + # load_ssm_application_parameters(aws_ssm_config) + # else: + # logging.getLogger().info( + # "Could not read aws-ssm.ini config file. Skipping SSM parameter lookup." + # ) + + # this is the Alembic Config object, which provides + # access to the values within the .ini file in use. + config = context.config + + # Interpret the config file for Python logging. + # This line sets up loggers basically. + if config.config_file_name is not None: + # raise Exception("Config file is missing.") + fileConfig(config.config_file_name) + + # FIXME why is this here? + config.set_main_option( + "sqlalchemy.url", + "sqlite:///.app.db", + ) + + return config + + _type_metadata: type_metadata | None = None + + def get_metadata(self, bases: list[MetaBase]): + if self._type_metadata is not None: + return self._type_metadata + + def include_schemas(names: List[str]): + def include_object( + object: Table, name: str, type_: str, reflected: Any, compare_to: Any + ): + if type_ == "table": + return object.schema in names + return True + + return include_object + + target_metadata = [base.metadata for base in bases] + schemas = [ + base.__table_args__["schema"] + for base in bases + if hasattr(base, "__table_args__") + and isinstance(base.__table_args__, dict) + and base.__table_args__["schema"] is not None + ] + + self._type_metadata = type_metadata(include_schemas, target_metadata, schemas) + return self._type_metadata + + def _configure_context( + self, bases: list[MetaBase], connection: Connection | Connectable | Engine + ): + metadata = self.get_metadata(bases) + target_metadata = metadata.target_metadata + include_schemas = metadata.include_schemas + schemas = metadata.schemas + + if connection.engine is None: + raise Exception("Unknown error. Connection engine is not set.") + + if not isinstance(connection, Connection): + raise Exception( + f"Unknown error. Connection is not a connection; it is a `{type(connection).__name__}`." 
+ ) + + if connection.engine.name == SQLiteDialect.DIALECT_NAME: + context.configure( + connection=connection, + target_metadata=target_metadata, + compare_type=True, + include_schemas=True, + include_object=include_schemas(schemas), + render_as_batch=True, + ) + elif connection.engine.name == PostgreSQLDialect.DIALECT_NAME: + context.configure( + connection=connection, + target_metadata=target_metadata, + compare_type=True, + include_schemas=True, + include_object=include_schemas(schemas), + ) + else: + raise Exception( + f"Unsupported database dialect `{connection.engine.name}`. Expected one of {[SQLiteDialect.DIALECT_NAME, PostgreSQLDialect.DIALECT_NAME]}" + ) + + def _run_migrations( + self, bases: list[MetaBase], connection: Connection | Connectable | Engine + ): + if connection.engine is None: + raise Exception( + "SQLAlchemy Session is not bound to an engine. This is not supported." + ) + + metadata = self.get_metadata(bases) + schemas = metadata.schemas + with context.begin_transaction(): + try: + if connection.engine.name == "postgresql": + _ = connection.execute( + f"SET search_path TO {','.join(schemas)},public;" + ) + context.run_migrations() + except ProgrammingError as error: + # This occurs when downgrading from the very last version + # because the `alembic_version` table is dropped. The exception + # can be safely ignored because the migration commits the transaction + # before the failure, and there is nothing left for Alembic to do. + if not ( + type(error.orig) is UndefinedTable + and "DELETE FROM alembic_version" in error.statement + ): + raise + + def run_migrations_offline(self, bases: list[MetaBase]): + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + + config = self.get_config() + metadata = self.get_metadata(bases) + target_metadata = metadata.target_metadata + include_schemas = metadata.include_schemas + schemas = metadata.schemas + + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + compare_type=True, + include_schemas=True, + include_object=include_schemas(schemas), + ) + + with context.begin_transaction(): + context.run_migrations() + + def run_migrations_online(self, bases: list[MetaBase]): + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. 
+ + """ + config = self.get_config() + + connectable: Connectable | None = cast(dict[str, Any], config.attributes).get( # pyright: ignore[reportUnknownMemberType] + "connection", None + ) + + if connectable: + self._configure_context(bases, connectable) + self._run_migrations(bases, connectable) + else: + connectable = engine_from_config( + config.get_section(config.config_ini_section), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + self._configure_context(bases, connection) + self._run_migrations(bases, connection) diff --git a/src/database/BL_Python/database/migrations/alembic/script.py.mako b/src/database/BL_Python/database/migrations/alembic/script.py.mako new file mode 100644 index 00000000..2c015630 --- /dev/null +++ b/src/database/BL_Python/database/migrations/alembic/script.py.mako @@ -0,0 +1,24 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade(): + ${upgrades if upgrades else "pass"} + + +def downgrade(): + ${downgrades if downgrades else "pass"} diff --git a/src/database/BL_Python/database/types.py b/src/database/BL_Python/database/types.py index 9b1887aa..115574c9 100644 --- a/src/database/BL_Python/database/types.py +++ b/src/database/BL_Python/database/types.py @@ -16,7 +16,7 @@ class MetaBase(Protocol): class TableNameCallback(Protocol): - def __call__( + def __call__( # pragma: nocover self, dialect_schema: str | None, full_table_name: str, @@ -29,6 +29,6 @@ class Connection(Protocol): dialect: Dialect -class Op(Protocol): +class Op(Protocol): # pragma: nocover @staticmethod def get_bind() -> Connection: ... diff --git a/src/database/MANIFEST.in b/src/database/MANIFEST.in index cfda70e0..b8efaa18 100644 --- a/src/database/MANIFEST.in +++ b/src/database/MANIFEST.in @@ -1,2 +1,3 @@ +graft BL_Python/database/migrations/alembic/ global-include *.pyi -global-include py.typed +global-include py.typed \ No newline at end of file diff --git a/src/programming/BL_Python/programming/config/__init__.py b/src/programming/BL_Python/programming/config/__init__.py index b1f5b133..fb2e5f5f 100644 --- a/src/programming/BL_Python/programming/config/__init__.py +++ b/src/programming/BL_Python/programming/config/__init__.py @@ -1,4 +1,5 @@ import abc +from pathlib import Path from typing import Any, Generic, TypeVar, cast import toml @@ -59,7 +60,7 @@ def build(self) -> type[TConfig]: def load_config( config_type: type[TConfig], - toml_file_path: str, + toml_file_path: str | Path, config_overrides: AnyDict | None = None, ) -> TConfig: config_dict: dict[str, Any] = toml.load(toml_file_path) From 223341f383dacb1ee37949555c5ceaeb1242848e Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Thu, 7 Mar 2024 16:21:05 -0800 Subject: [PATCH 05/55] Add tests for Alembic migration bootstrapping. 
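For orientation, a sketch (values are illustrative) of the configuration shape the bootstrapping path under test expects; `load_config()` builds the equivalent of this from config.toml:

    from BL_Python.database.config import Config, DatabaseConfig

    # The same structure that loading config.toml produces.
    config = Config(database=DatabaseConfig(connection_string="sqlite:///:memory:"))
    print(config.database.connection_string)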
--- .../test/unit/migrations/alembic/test_env.py | 60 +++++++++++++++++++ 1 file changed, 60 insertions(+) create mode 100644 src/database/test/unit/migrations/alembic/test_env.py diff --git a/src/database/test/unit/migrations/alembic/test_env.py b/src/database/test/unit/migrations/alembic/test_env.py new file mode 100644 index 00000000..f6adc59e --- /dev/null +++ b/src/database/test/unit/migrations/alembic/test_env.py @@ -0,0 +1,60 @@ +from pathlib import Path +from unittest.mock import ANY + +import pytest +from BL_Python.database.migrations.alembic.env import run_migrations +from BL_Python.programming.str import get_random_str +from mock import MagicMock +from pytest_mock import MockerFixture + + +def test__run_migrations__sets_default_config_filename(mocker: MockerFixture): + _ = mocker.patch("BL_Python.database.migrations.alembic.env.Injector") + _ = mocker.patch("BL_Python.database.migrations.alembic.env.load_config") + _ = mocker.patch("BL_Python.database.migrations.alembic.env.context") + path_mock = mocker.patch("BL_Python.database.migrations.alembic.env.Path") + + run_migrations(MagicMock()) + + path_mock.assert_called_once_with("config.toml") + + +def test__run_migrations__uses_specified_config_filename(mocker: MockerFixture): + _ = mocker.patch("BL_Python.database.migrations.alembic.env.Path") + _ = mocker.patch("BL_Python.database.migrations.alembic.env.Injector") + config_mock = mocker.patch("BL_Python.database.migrations.alembic.env.Config") + load_config_mock = mocker.patch( + "BL_Python.database.migrations.alembic.env.load_config" + ) + _ = mocker.patch("BL_Python.database.migrations.alembic.env.context") + + config_filename = Path(get_random_str()) + run_migrations(MagicMock(), config_filename=config_filename) + + load_config_mock.assert_called_once_with(config_mock, config_filename) + + +@pytest.mark.parametrize("mode", ["online", "offline"]) +def test__run_migrations__runs_correct_migration_mode(mode: str, mocker: MockerFixture): + _ = mocker.patch("BL_Python.database.migrations.alembic.env.load_config") + _ = mocker.patch("BL_Python.database.migrations.alembic.env.Path") + _ = mocker.patch( + "BL_Python.database.migrations.alembic.env.context", + is_offline_mode=MagicMock(return_value=mode == "offline"), + ) + alembic_env_setup_mock = MagicMock( + run_migrations_offline=MagicMock(), run_migrations_online=MagicMock() + ) + _ = mocker.patch( + "BL_Python.database.migrations.alembic.env.Injector", + return_value=MagicMock( + create_object=MagicMock(return_value=alembic_env_setup_mock) + ), + ) + + run_migrations(MagicMock()) + + if mode == "offline": + alembic_env_setup_mock.run_migrations_offline.assert_called_once() + else: + alembic_env_setup_mock.run_migrations_online.assert_called_once() From a63b649926ea79dbed10da7aa629a8ebdcb16bc8 Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Fri, 8 Mar 2024 15:29:51 -0800 Subject: [PATCH 06/55] Set specific versions for code quality tools. 
--- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index 50c5a25b..5698e06f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -83,6 +83,7 @@ dev-dependencies = [ "pytest-mock", "mock", "pytest-cov ~= 4.1", + "coverage ~= 7.4", "pyright ~= 1.1", "isort ~= 5.13", "ruff ~= 0.3" From aba18450da0ba9a711a2d47149885fcce3afb37d Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Wed, 13 Mar 2024 15:17:58 -0700 Subject: [PATCH 07/55] Remove dist directory on `make clean` --- Makefile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index c14f518b..7642f1b9 100644 --- a/Makefile +++ b/Makefile @@ -215,6 +215,7 @@ publish-all : reset $(VENV) clean-build : find . -type d \( \ -name build \ + -o -name dist \ -o -name __pycache__ \ -o -name \*.egg-info \ -o -name .pytest-cache \ @@ -241,4 +242,4 @@ reset-check: @( read -p "Are you sure? [y/N]: " response && case "$$response" in [yY]) true;; *) false;; esac ) reset : reset-check clean - git checkout -- $(PYPROJECT_FILES) \ No newline at end of file + git checkout -- $(PYPROJECT_FILES) From 50d4d05325d080e4880522af635b84b25e81aa12 Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Fri, 15 Mar 2024 15:32:16 -0700 Subject: [PATCH 08/55] Fix issue caused by improper handling of invalid database configurations. This change enables dynamic database dialect configurations through Pydantic by ignoring attributes that are irrelevant to a given dialect. --- src/database/BL_Python/database/config.py | 31 ++++++++++++++++++++++- 1 file changed, 30 insertions(+), 1 deletion(-) diff --git a/src/database/BL_Python/database/config.py b/src/database/BL_Python/database/config.py index d03cbc3d..f3308651 100644 --- a/src/database/BL_Python/database/config.py +++ b/src/database/BL_Python/database/config.py @@ -1,16 +1,45 @@ +from BL_Python.programming.collections.dict import AnyDict from BL_Python.programming.config import AbstractConfig from pydantic import BaseModel +from pydantic.config import ConfigDict class DatabaseConnectArgsConfig(BaseModel): + # allow any values, as this type is not + # specifically the type to be used elsewhere + model_config = ConfigDict(extra="allow") + + +class PostgreSQLDatabaseConnectArgsConfig(DatabaseConnectArgsConfig): + # ignore anything that DatabaseConnectArgsConfig + # allowed to be set, except for any other attributes + # of this class, which will end up assigned through + # the instatiation of the __init__ override of DatabaseConfig + model_config = ConfigDict(extra="ignore") + sslmode: str = "" options: str = "" +class SQLiteDatabaseConnectArgsConfig(DatabaseConnectArgsConfig): + model_config = ConfigDict(extra="ignore") + + class DatabaseConfig(BaseModel, AbstractConfig): + def __init__(self, **data: AnyDict): + super().__init__(**data) + + model_data = self.connect_args.model_dump() if self.connect_args else {} + if self.connection_string.startswith("sqlite://"): + self.connect_args = SQLiteDatabaseConnectArgsConfig(**model_data) + elif self.connection_string.startswith("postgresql://"): + self.connect_args = PostgreSQLDatabaseConnectArgsConfig(**model_data) + connection_string: str = "sqlite:///:memory:" sqlalchemy_echo: bool = False - connect_args: DatabaseConnectArgsConfig | None = None + # the static field allows Pydantic to store + # values from a dictionary + connect_args: DatabaseConnectArgsConfig | None class Config(BaseModel, AbstractConfig): From 5a2bdb15d5c27df2d726ebf17b043fb0395478a9 Mon Sep 17 00:00:00 2001 From: Aaron Holmes 
Date: Fri, 15 Mar 2024 15:33:10 -0700 Subject: [PATCH 09/55] Adjustment to Makefile. - ignore .git and .venv during clean for speed - use `$(MAKE)` instead of `make` for reliability --- Makefile | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/Makefile b/Makefile index 7642f1b9..b0b2d72a 100644 --- a/Makefile +++ b/Makefile @@ -213,7 +213,12 @@ publish-all : reset $(VENV) clean-build : - find . -type d \( \ + find . -type d \ + \( \ + -path ./$(VENV) \ + -o -path ./.git \ + \) -prune -false \ + -o \( \ -name build \ -o -name dist \ -o -name __pycache__ \ @@ -233,8 +238,8 @@ clean : clean-build clean-test @echo '\nDeactivate your venv with `deactivate`' remake : - make clean - make + $(MAKE) clean + $(MAKE) reset-check: # https://stackoverflow.com/a/47839479 From f2026eb300f41fd908a07539a361265483d4be4a Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Mon, 18 Mar 2024 12:23:22 -0700 Subject: [PATCH 10/55] Add reports directory. This directory will be used to output ephemeral data, like pytest reports. --- reports/.gitignore | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 reports/.gitignore diff --git a/reports/.gitignore b/reports/.gitignore new file mode 100644 index 00000000..cf9001b2 --- /dev/null +++ b/reports/.gitignore @@ -0,0 +1,6 @@ +# https://stackoverflow.com/a/932982 + +# Ignore everything in this directory +* +# Except this file +!.gitignore From 201037cb3cd320bbdc7adf7cbd180e13d5e599d3 Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Mon, 18 Mar 2024 12:46:15 -0700 Subject: [PATCH 11/55] Change where pytest reports end up. Also: - improve Makefile a bit w/ explicit PHONY targets - parallelize test jobs - continue all test jobs if any fail --- Makefile | 27 +++++++++++++++++++++------ pyproject.toml | 6 ++++-- 2 files changed, 25 insertions(+), 8 deletions(-) diff --git a/Makefile b/Makefile index b0b2d72a..b09ccbd6 100644 --- a/Makefile +++ b/Makefile @@ -23,8 +23,15 @@ GITHUB_REF ?= 00000000-0000-0000-0000-000000000000 # Can be overridden. GITHUB_WORKSPACE ?= $(CURDIR) +# What repository to publish packages to. +# `testpypi` and `pypi` are valid values. PYPI_REPO ?= testpypi +# The directory to write ephermal reports to, +# such as pytest coverage reports. +REPORTS_DIR ?= reports + + # Can be overridden. This is used to change the prereqs # of some supporting targets, like `format-ruff`. # This variable is reassigned to whichever of the dev/cicd @@ -69,8 +76,10 @@ PACKAGES=BL_Python.all $(subst /pyproject.toml,,$(subst src/,BL_Python.,$(wildca # Rather than duplicating BL_Python.all, # just prereq it. +.PHONY: dev dev : dev_mode BL_Python.all +.PHONY: cicd cicd : cicd_mode $(VENV) $(PYPROJECT_FILES) @if [ -f $(call package_to_inst,) ]; then echo "Package is already built, skipping..." 
@@ -165,6 +174,7 @@ format-ruff : $(VENV) $(DEFAULT_TARGET) ruff format --preview --respect-gitignore +.PHONY: format format-ruff format-isort format : $(VENV) $(DEFAULT_TARGET) format-isort format-ruff @@ -198,13 +208,17 @@ test-pytest : $(VENV) $(DEFAULT_TARGET) $(ACTIVATE_VENV) pytest $(PYTEST_FLAGS) - coverage html -d coverage + coverage html --data-file=$(REPORTS_DIR)/pytest/.coverage +.PHONY: test test-pytest test-pyright test-ruff test-isort +_test : $(VENV) $(DEFAULT_TARGET) test-isort test-ruff test-pyright test-pytest test : CMD_PREFIX=@ -test : $(VENV) $(DEFAULT_TARGET) clean-test test-isort test-ruff test-pyright test-pytest +test : clean-test + $(MAKE) -j --keep-going _test # Publishing should use a real install, which `cicd` fulfills +.PHONY: publish-all publish-all : REWRITE_DEPENDENCIES=false publish-all : reset $(VENV) $(ACTIVATE_VENV) @@ -227,16 +241,16 @@ clean-build : \) -prune -exec rm -rf {} \; clean-test : - $(CMD_PREFIX)rm -rf cov.xml \ - pytest.xml \ - coverage \ - .coverage + $(CMD_PREFIX)rm -rf \ + $(REPORTS_DIR)/pytest +.PHONY: clean clean-test clean-build clean : clean-build clean-test rm -rf $(VENV) @echo '\nDeactivate your venv with `deactivate`' +.PHONY: remake remake : $(MAKE) clean $(MAKE) @@ -246,5 +260,6 @@ reset-check: @echo -n "This will make destructive changes! Considering stashing changes first.\n" @( read -p "Are you sure? [y/N]: " response && case "$$response" in [yY]) true;; *) false;; esac ) +.PHONY: reset reset-check reset : reset-check clean git checkout -- $(PYPROJECT_FILES) diff --git a/pyproject.toml b/pyproject.toml index 5698e06f..240ef6e4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -173,9 +173,9 @@ addopts = [ # and # https://github.com/microsoft/vscode-python/issues/21845 "--cov=.", - "--junitxml=pytest.xml", + "--junitxml=reports/pytest/pytest.xml", "-o=junit_family=xunit2", - "--cov-report=xml:cov.xml", + "--cov-report=xml:reports/pytest/cov.xml", "--cov-report=term-missing", ] @@ -187,9 +187,11 @@ norecursedirs = "__pycache__ build .pytest_cache *.egg-info .venv .github-venv" include_namespace_packages = true [tool.coverage.html] +directory = "reports/pytest/coverage" show_contexts = true [tool.coverage.run] +data_file = "reports/pytest/.coverage" dynamic_context = "test_function" relative_files = true omit = [ From 75b1312970438f5fecc7df98566c1781d0ce893e Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Mon, 18 Mar 2024 12:52:46 -0700 Subject: [PATCH 12/55] Fix misplaced comments. --- Makefile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index b09ccbd6..67a2ce77 100644 --- a/Makefile +++ b/Makefile @@ -74,9 +74,9 @@ PYPROJECT_FILES=./pyproject.toml $(wildcard src/*/pyproject.toml) PACKAGE_PATHS=$(subst /pyproject.toml,,$(PYPROJECT_FILES)) PACKAGES=BL_Python.all $(subst /pyproject.toml,,$(subst src/,BL_Python.,$(wildcard src/*/pyproject.toml))) +.PHONY: dev # Rather than duplicating BL_Python.all, # just prereq it. -.PHONY: dev dev : dev_mode BL_Python.all .PHONY: cicd @@ -217,8 +217,8 @@ test : clean-test $(MAKE) -j --keep-going _test -# Publishing should use a real install, which `cicd` fulfills .PHONY: publish-all +# Publishing should use a real install, which `cicd` fulfills publish-all : REWRITE_DEPENDENCIES=false publish-all : reset $(VENV) $(ACTIVATE_VENV) From ff4baca85e9d9e975379c11ca7adf6217841f111 Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Mon, 18 Mar 2024 16:08:47 -0700 Subject: [PATCH 13/55] Add junit2html to dependencies for report generation. 
--- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index 240ef6e4..a1ac34ed 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -84,6 +84,7 @@ dev-dependencies = [ "mock", "pytest-cov ~= 4.1", "coverage ~= 7.4", + "junit2html ~= 30.1", "pyright ~= 1.1", "isort ~= 5.13", "ruff ~= 0.3" From 35ff5b063f55287110d3d44ed1b524971c2a4ba0 Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Mon, 18 Mar 2024 16:30:02 -0700 Subject: [PATCH 14/55] Output JUnit/XUnit HTML report. --- Makefile | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 67a2ce77..160c20f6 100644 --- a/Makefile +++ b/Makefile @@ -207,8 +207,14 @@ test-pyright : $(VENV) $(DEFAULT_TARGET) test-pytest : $(VENV) $(DEFAULT_TARGET) $(ACTIVATE_VENV) - pytest $(PYTEST_FLAGS) - coverage html --data-file=$(REPORTS_DIR)/pytest/.coverage + pytest $(PYTEST_FLAGS) \ + && PYTEST_SUCCESS=0 \ + || PYTEST_SUCCESS=$$? + + -coverage html --data-file=$(REPORTS_DIR)/pytest/.coverage + -junit2html $(REPORTS_DIR)/pytest/pytest.xml $(REPORTS_DIR)/pytest/pytest.html + + exit $$PYTEST_SUCCESS .PHONY: test test-pytest test-pyright test-ruff test-isort _test : $(VENV) $(DEFAULT_TARGET) test-isort test-ruff test-pyright test-pytest From 6339079fcaf16a0a39ec4e31dfe33ddc4b4f289a Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Mon, 18 Mar 2024 16:34:38 -0700 Subject: [PATCH 15/55] Change the pytest artifact path for upload. --- .github/workflows/CICD.yaml | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/.github/workflows/CICD.yaml b/.github/workflows/CICD.yaml index af8969b6..656196fd 100644 --- a/.github/workflows/CICD.yaml +++ b/.github/workflows/CICD.yaml @@ -182,10 +182,7 @@ jobs: with: name: pytest-and-coverage-report path: | - pytest.xml - cov.xml - .coverage - coverage/ + reports/pytest/ retention-days: 1 if-no-files-found: error From 5cd25d50a41e72ae339db4827fc9d8ce1fde3446 Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Tue, 19 Mar 2024 10:40:57 -0700 Subject: [PATCH 16/55] Fix DatabaseConfig error with optional field not having a default. --- src/database/BL_Python/database/config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/database/BL_Python/database/config.py b/src/database/BL_Python/database/config.py index f3308651..51c52563 100644 --- a/src/database/BL_Python/database/config.py +++ b/src/database/BL_Python/database/config.py @@ -39,7 +39,7 @@ def __init__(self, **data: AnyDict): sqlalchemy_echo: bool = False # the static field allows Pydantic to store # values from a dictionary - connect_args: DatabaseConnectArgsConfig | None + connect_args: DatabaseConnectArgsConfig | None = None class Config(BaseModel, AbstractConfig): From 9558dde11cebf5d0822bd2c0d19a3b2670bdca9d Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Tue, 19 Mar 2024 11:01:50 -0700 Subject: [PATCH 17/55] Fix Pyright errors. 
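For reference, a sketch (connection values are illustrative) of the dialect-specific narrowing DatabaseConfig performs on connect_args, whose typing this change touches up:

    from BL_Python.database.config import DatabaseConfig

    config = DatabaseConfig(
        connection_string="postgresql://user:pass@localhost/db",
        connect_args={"sslmode": "require", "unrelated_option": "dropped"},
    )
    # connect_args is re-validated as PostgreSQLDatabaseConnectArgsConfig;
    # keys that do not apply to the dialect are ignored.
    print(config.connect_args)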
--- src/database/BL_Python/database/config.py | 5 +++-- src/database/test/unit/migrations/alembic/test_env.py | 1 - 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/database/BL_Python/database/config.py b/src/database/BL_Python/database/config.py index 51c52563..9dd2371d 100644 --- a/src/database/BL_Python/database/config.py +++ b/src/database/BL_Python/database/config.py @@ -1,4 +1,5 @@ -from BL_Python.programming.collections.dict import AnyDict +from typing import Any + from BL_Python.programming.config import AbstractConfig from pydantic import BaseModel from pydantic.config import ConfigDict @@ -26,7 +27,7 @@ class SQLiteDatabaseConnectArgsConfig(DatabaseConnectArgsConfig): class DatabaseConfig(BaseModel, AbstractConfig): - def __init__(self, **data: AnyDict): + def __init__(self, **data: Any): super().__init__(**data) model_data = self.connect_args.model_dump() if self.connect_args else {} diff --git a/src/database/test/unit/migrations/alembic/test_env.py b/src/database/test/unit/migrations/alembic/test_env.py index f6adc59e..3ddb2f57 100644 --- a/src/database/test/unit/migrations/alembic/test_env.py +++ b/src/database/test/unit/migrations/alembic/test_env.py @@ -1,5 +1,4 @@ from pathlib import Path -from unittest.mock import ANY import pytest from BL_Python.database.migrations.alembic.env import run_migrations
From 4a3cc2fd0cb3f35a3d8bf5de6a6a3f195c83a152 Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Tue, 19 Mar 2024 11:39:19 -0700 Subject: [PATCH 20/55] Rename `SUCCESS` var to `EXIT_CODE` to better represent what it is. --- Makefile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Makefile b/Makefile index 160c20f6..9245b638 100644 --- a/Makefile +++ b/Makefile @@ -208,13 +208,13 @@ test-pytest : $(VENV) $(DEFAULT_TARGET) $(ACTIVATE_VENV) pytest $(PYTEST_FLAGS) \ - && PYTEST_SUCCESS=0 \ - || PYTEST_SUCCESS=$$? + && PYTEST_EXIT_CODE=0 \ + || PYTEST_EXIT_CODE=$$? -coverage html --data-file=$(REPORTS_DIR)/pytest/.coverage -junit2html $(REPORTS_DIR)/pytest/pytest.xml $(REPORTS_DIR)/pytest/pytest.html - exit $$PYTEST_EXIT_CODE + exit $$PYTEST_EXIT_CODE .PHONY: test test-pytest test-pyright test-ruff test-isort _test : $(VENV) $(DEFAULT_TARGET) test-isort test-ruff test-pyright test-pytest From da03a8732a1b1ffc517ec6febecfa27eb26addbd Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Tue, 19 Mar 2024 16:16:18 -0700 Subject: [PATCH 21/55] Add `bl-alembic` command. This command wraps `alembic` and generates a default `alembic.ini` if one is not specified on the command line. This will allow us to provide a default configuration file without requiring additional setup by users of the tool. Still to figure out: what the convention is for the location of database migration files in general, and for scaffolded applications. They will likely not follow the same conventions.
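For illustration, usage is expected to mirror `alembic` itself, whether through the new `bl-alembic` console script or the module entry point; the wrapper only injects a `-c` argument when no config was given. A sketch assuming the package is installed (not part of the diff below):

    # Hypothetical usage sketch of the wrapper added in this commit.
    from BL_Python.database.migrations.alembic.__main__ import bl_alembic

    # No -c/--config argument: a temporary copy of the packaged default
    # alembic.ini is created and passed to `alembic` via `-c`.
    bl_alembic(["history"])

    # An explicit config file is passed through to `alembic` unmodified.
    bl_alembic(["-c", "alembic.ini", "history"])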
--- .../database/migrations/alembic/__main__.py | 59 ++++++++++++++++++ .../database/migrations/alembic/alembic.ini | 61 +------------------ src/database/pyproject.toml | 3 + 3 files changed, 63 insertions(+), 60 deletions(-) create mode 100644 src/database/BL_Python/database/migrations/alembic/__main__.py diff --git a/src/database/BL_Python/database/migrations/alembic/__main__.py b/src/database/BL_Python/database/migrations/alembic/__main__.py new file mode 100644 index 00000000..2eccf637 --- /dev/null +++ b/src/database/BL_Python/database/migrations/alembic/__main__.py @@ -0,0 +1,59 @@ +import logging +import sys +import tempfile +from os import environ +from pathlib import Path + +# this is Alembic's main entry point +from alembic.config import CommandLine + + +def bl_alembic(argv: list[str] | None = None, log_level: int | str | None = None): + logging.basicConfig(level=logging.INFO) + if not log_level: + log_level = environ.get("LOG_LEVEL") + log_level = int(log_level) if log_level else logging.INFO + + logger = logging.getLogger() + logger.setLevel(log_level) + + if not argv: + argv = sys.argv[1:] + + # do some set up stuff + + alembic_command_line = CommandLine(None) + # alembic_parsed_args = alembic_command_line.parser.parse_args(argv) + + # if a config file has been specified on the + # command line, use it and don't create + # a temporary one + # print(alembic_parsed_args) + args = set(argv) + + if ( + not args + or "-c" in args + or "--config" in args + or "-h" in args + or "--help" in args + ): + logger.debug("Running unmodified `alembic` command.") + # run Alembic + return alembic_command_line.main(argv) + + logger.debug("Running `alembic` with modified command.") + with ( + open(Path(Path(__file__).resolve().parent, "alembic.ini"), "r") as f1, + tempfile.NamedTemporaryFile("w+b") as f2, + ): + f2.writelines(f1.buffer) + # the file will not be read correctly + # without seeking to the 0th byte + _ = f2.seek(0) + + argv.insert(0, "-c") + argv.insert(1, f2.name) + + # run Alembic + return alembic_command_line.main(argv) diff --git a/src/database/BL_Python/database/migrations/alembic/alembic.ini b/src/database/BL_Python/database/migrations/alembic/alembic.ini index 11ce0b21..4233cf8e 100644 --- a/src/database/BL_Python/database/migrations/alembic/alembic.ini +++ b/src/database/BL_Python/database/migrations/alembic/alembic.ini @@ -1,70 +1,11 @@ -# A generic, single database configuration. - [alembic] -# path to migration scripts script_location = migrations - -# template used to generate migration files -# file_template = %%(rev)s_%%(slug)s - -# sys.path path, will be prepended to sys.path if present. -# defaults to the current working directory. prepend_sys_path = . - -# timezone to use when rendering the date within the migration file -# as well as the filename. -# If specified, requires the python-dateutil library that can be -# installed by adding `alembic[tz]` to the pip requirements -# string value is passed to dateutil.tz.gettz() -# leave blank for localtime -# timezone = - -# max length of characters to apply to the -# "slug" field -# truncate_slug_length = 40 - -# set to 'true' to run the environment during -# the 'revision' command, regardless of autogenerate -# revision_environment = false - -# set to 'true' to allow .pyc and .pyo files without -# a source .py file to be detected as revisions in the -# versions/ directory -# sourceless = false - -# version location specification; This defaults -# to migrations/versions. 
When using multiple version -# directories, initial revisions must be specified with --version-path. -# The path separator used here should be the separator specified by "version_path_separator" -# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions - -# version path separator; As mentioned above, this is the character used to split -# version_locations. Valid values are: -# -# version_path_separator = : -# version_path_separator = ; -# version_path_separator = space -version_path_separator = os # default: use os.pathsep - -# the output encoding used when revision files -# are written from script.py.mako -# output_encoding = utf-8 - +version_path_separator = os sqlalchemy.url = driver://user:pass@localhost/dbname - [post_write_hooks] -# post_write_hooks defines scripts or Python functions that are run -# on newly generated revision scripts. See the documentation for further -# detail and examples - -# format using "black" - use the console_scripts runner, against the "black" entrypoint -# hooks = black -# black.type = console_scripts -# black.entrypoint = black -# black.options = -l 79 REVISION_SCRIPT_FILENAME -# Logging configuration [loggers] keys = root,sqlalchemy,alembic diff --git a/src/database/pyproject.toml b/src/database/pyproject.toml index ec2f38a2..81c90173 100644 --- a/src/database/pyproject.toml +++ b/src/database/pyproject.toml @@ -57,6 +57,9 @@ exclude = ["build*"] [tool.setuptools.package-data] "BL_Python.database" = ["py.typed"] +[project.scripts] +bl-alembic = "BL_Python.database.migrations.alembic.__main__:bl_alembic" + [project.optional-dependencies] postgres = [ "psycopg2 ~= 2.9" From e2e1210eb0945ae56b45e6b21d565357232d08d5 Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Wed, 20 Mar 2024 12:22:00 -0700 Subject: [PATCH 22/55] Updated Alembic type stubs. 
--- .../database/typings}/alembic/__init__.pyi | 1 - .../database/typings}/alembic/__main__.pyi | 0 .../typings/alembic/autogenerate/__init__.pyi | 9 + .../typings}/alembic/autogenerate/api.pyi | 24 ++- .../typings/alembic/autogenerate/compare.pyi | 13 ++ .../typings}/alembic/autogenerate/render.pyi | 0 .../alembic/autogenerate/rewriter.pyi | 21 +- .../database/typings}/alembic/command.pyi | 17 +- .../database/typings}/alembic/config.pyi | 18 +- .../database/typings}/alembic/context.pyi | 168 +++------------- .../typings}/alembic/ddl/__init__.pyi | 2 +- src/database/typings/alembic/ddl/_autogen.pyi | 162 +++++++++++++++ .../database/typings}/alembic/ddl/base.pyi | 0 .../database/typings}/alembic/ddl/impl.pyi | 44 +++- .../database/typings}/alembic/ddl/mssql.pyi | 9 +- .../database/typings}/alembic/ddl/mysql.pyi | 0 .../database/typings}/alembic/ddl/oracle.pyi | 2 +- .../typings}/alembic/ddl/postgresql.pyi | 34 +++- .../database/typings}/alembic/ddl/sqlite.pyi | 2 +- .../database/typings}/alembic/environment.pyi | 0 .../database/typings}/alembic/migration.pyi | 0 .../database/typings}/alembic/op.pyi | 103 +++++----- .../typings}/alembic/operations/__init__.pyi | 4 +- .../typings}/alembic/operations/base.pyi | 117 +++++------ .../typings}/alembic/operations/batch.pyi | 6 +- .../typings}/alembic/operations/ops.pyi | 145 +++++++------- .../typings}/alembic/operations/schemaobj.pyi | 0 .../typings}/alembic/operations/toimpl.pyi | 0 .../typings}/alembic/runtime/__init__.pyi | 0 .../typings}/alembic/runtime/environment.pyi | 188 +++++++----------- .../typings}/alembic/runtime/migration.pyi | 15 +- .../typings}/alembic/script/__init__.pyi | 0 .../database/typings}/alembic/script/base.pyi | 22 +- .../typings}/alembic/script/revision.pyi | 24 ++- .../typings}/alembic/script/write_hooks.pyi | 6 +- .../typings}/alembic/testing/__init__.pyi | 0 .../typings}/alembic/testing/assertions.pyi | 2 +- .../database/typings}/alembic/testing/env.pyi | 0 .../typings}/alembic/testing/fixtures.pyi | 4 +- .../typings}/alembic/testing/util.pyi | 4 +- .../typings/alembic/util/__init__.pyi | 13 ++ src/database/typings/alembic/util/compat.pyi | 40 ++++ .../database/typings}/alembic/util/editor.pyi | 0 .../database/typings}/alembic/util/exc.pyi | 0 .../typings}/alembic/util/langhelpers.pyi | 22 +- .../typings}/alembic/util/messaging.pyi | 6 +- .../typings}/alembic/util/pyfiles.pyi | 9 +- .../typings}/alembic/util/sqla_compat.pyi | 27 ++- typings/alembic | 1 + typings/alembic/autogenerate/__init__.pyi | 9 - typings/alembic/autogenerate/compare.pyi | 63 ------ typings/alembic/util/__init__.pyi | 13 -- typings/alembic/util/compat.pyi | 30 --- 53 files changed, 725 insertions(+), 674 deletions(-) rename {typings => src/database/typings}/alembic/__init__.pyi (90%) rename {typings => src/database/typings}/alembic/__main__.pyi (100%) create mode 100644 src/database/typings/alembic/autogenerate/__init__.pyi rename {typings => src/database/typings}/alembic/autogenerate/api.pyi (89%) create mode 100644 src/database/typings/alembic/autogenerate/compare.pyi rename {typings => src/database/typings}/alembic/autogenerate/render.pyi (100%) rename {typings => src/database/typings}/alembic/autogenerate/rewriter.pyi (76%) rename {typings => src/database/typings}/alembic/command.pyi (89%) rename {typings => src/database/typings}/alembic/config.pyi (93%) rename {typings => src/database/typings}/alembic/context.pyi (87%) rename {typings => src/database/typings}/alembic/ddl/__init__.pyi (71%) create mode 100644 
src/database/typings/alembic/ddl/_autogen.pyi rename {typings => src/database/typings}/alembic/ddl/base.pyi (100%) rename {typings => src/database/typings}/alembic/ddl/impl.pyi (81%) rename {typings => src/database/typings}/alembic/ddl/mssql.pyi (90%) rename {typings => src/database/typings}/alembic/ddl/mysql.pyi (100%) rename {typings => src/database/typings}/alembic/ddl/oracle.pyi (97%) rename {typings => src/database/typings}/alembic/ddl/postgresql.pyi (79%) rename {typings => src/database/typings}/alembic/ddl/sqlite.pyi (97%) rename {typings => src/database/typings}/alembic/environment.pyi (100%) rename {typings => src/database/typings}/alembic/migration.pyi (100%) rename {typings => src/database/typings}/alembic/op.pyi (92%) rename {typings => src/database/typings}/alembic/operations/__init__.pyi (69%) rename {typings => src/database/typings}/alembic/operations/base.pyi (93%) rename {typings => src/database/typings}/alembic/operations/batch.pyi (96%) rename {typings => src/database/typings}/alembic/operations/ops.pyi (92%) rename {typings => src/database/typings}/alembic/operations/schemaobj.pyi (100%) rename {typings => src/database/typings}/alembic/operations/toimpl.pyi (100%) rename {typings => src/database/typings}/alembic/runtime/__init__.pyi (100%) rename {typings => src/database/typings}/alembic/runtime/environment.pyi (89%) rename {typings => src/database/typings}/alembic/runtime/migration.pyi (98%) rename {typings => src/database/typings}/alembic/script/__init__.pyi (100%) rename {typings => src/database/typings}/alembic/script/base.pyi (90%) rename {typings => src/database/typings}/alembic/script/revision.pyi (87%) rename {typings => src/database/typings}/alembic/script/write_hooks.pyi (83%) rename {typings => src/database/typings}/alembic/testing/__init__.pyi (100%) rename {typings => src/database/typings}/alembic/testing/assertions.pyi (94%) rename {typings => src/database/typings}/alembic/testing/env.pyi (100%) rename {typings => src/database/typings}/alembic/testing/fixtures.pyi (92%) rename {typings => src/database/typings}/alembic/testing/util.pyi (89%) create mode 100644 src/database/typings/alembic/util/__init__.pyi create mode 100644 src/database/typings/alembic/util/compat.pyi rename {typings => src/database/typings}/alembic/util/editor.pyi (100%) rename {typings => src/database/typings}/alembic/util/exc.pyi (100%) rename {typings => src/database/typings}/alembic/util/langhelpers.pyi (57%) rename {typings => src/database/typings}/alembic/util/messaging.pyi (84%) rename {typings => src/database/typings}/alembic/util/pyfiles.pyi (73%) rename {typings => src/database/typings}/alembic/util/sqla_compat.pyi (77%) create mode 120000 typings/alembic delete mode 100644 typings/alembic/autogenerate/__init__.pyi delete mode 100644 typings/alembic/autogenerate/compare.pyi delete mode 100644 typings/alembic/util/__init__.pyi delete mode 100644 typings/alembic/util/compat.pyi diff --git a/typings/alembic/__init__.pyi b/src/database/typings/alembic/__init__.pyi similarity index 90% rename from typings/alembic/__init__.pyi rename to src/database/typings/alembic/__init__.pyi index c8d63214..cb635d53 100644 --- a/typings/alembic/__init__.pyi +++ b/src/database/typings/alembic/__init__.pyi @@ -2,7 +2,6 @@ This type stub file was generated by pyright. """ -import sys from . import context, op __version__ = ... 
diff --git a/typings/alembic/__main__.pyi b/src/database/typings/alembic/__main__.pyi similarity index 100% rename from typings/alembic/__main__.pyi rename to src/database/typings/alembic/__main__.pyi diff --git a/src/database/typings/alembic/autogenerate/__init__.pyi b/src/database/typings/alembic/autogenerate/__init__.pyi new file mode 100644 index 00000000..bff3dfff --- /dev/null +++ b/src/database/typings/alembic/autogenerate/__init__.pyi @@ -0,0 +1,9 @@ +""" +This type stub file was generated by pyright. +""" + +from .api import RevisionContext as RevisionContext, _render_migration_diffs as _render_migration_diffs, compare_metadata as compare_metadata, produce_migrations as produce_migrations, render_python_code as render_python_code +from .compare import _produce_net_changes as _produce_net_changes, comparators as comparators +from .render import render_op_text as render_op_text, renderers as renderers +from .rewriter import Rewriter as Rewriter + diff --git a/typings/alembic/autogenerate/api.pyi b/src/database/typings/alembic/autogenerate/api.pyi similarity index 89% rename from typings/alembic/autogenerate/api.pyi rename to src/database/typings/alembic/autogenerate/api.pyi index e1f90a5f..57092137 100644 --- a/typings/alembic/autogenerate/api.pyi +++ b/src/database/typings/alembic/autogenerate/api.pyi @@ -2,15 +2,17 @@ This type stub file was generated by pyright. """ -from typing import Any, Callable, Dict, Iterator, Optional, Sequence, Set, TYPE_CHECKING, Union +from typing import Any, Dict, Iterator, List, Optional, Sequence, Set, TYPE_CHECKING, Union from .. import util +from ..util import sqla_compat from sqlalchemy.engine import Connection, Dialect, Inspector -from sqlalchemy.sql.schema import MetaData, SchemaItem +from sqlalchemy.sql.schema import MetaData, SchemaItem, Table from ..config import Config from ..operations.ops import DowngradeOps, MigrationScript, UpgradeOps -from ..runtime.environment import NameFilterParentNames, NameFilterType, RenderItemFn +from ..runtime.environment import NameFilterParentNames, NameFilterType, ProcessRevisionDirectiveFn, RenderItemFn from ..runtime.migration import MigrationContext from ..script.base import Script, ScriptDirectory +from ..script.revision import _GetRevArg if TYPE_CHECKING: ... @@ -182,7 +184,7 @@ class AutogenContext: dialect: Optional[Dialect] = ... imports: Set[str] = ... migration_context: MigrationContext = ... - def __init__(self, migration_context: MigrationContext, metadata: Optional[MetaData] = ..., opts: Optional[dict] = ..., autogenerate: bool = ...) -> None: + def __init__(self, migration_context: MigrationContext, metadata: Optional[MetaData] = ..., opts: Optional[Dict[str, Any]] = ..., autogenerate: bool = ...) -> None: ... @util.memoized_property @@ -202,7 +204,7 @@ class AutogenContext: """ ... - def run_object_filters(self, object_: SchemaItem, name: Optional[str], type_: NameFilterType, reflected: bool, compare_to: Optional[SchemaItem]) -> bool: + def run_object_filters(self, object_: SchemaItem, name: sqla_compat._ConstraintName, type_: NameFilterType, reflected: bool, compare_to: Optional[SchemaItem]) -> bool: """Run the context's object filters and return True if the targets should be part of the autogenerate operation. @@ -217,7 +219,7 @@ class AutogenContext: run_filters = ... @util.memoized_property - def sorted_tables(self): # -> list[Unknown]: + def sorted_tables(self) -> List[Table]: """Return an aggregate of the :attr:`.MetaData.sorted_tables` collection(s). 
@@ -230,7 +232,7 @@ class AutogenContext: ... @util.memoized_property - def table_key_to_table(self): # -> dict[Unknown, Unknown]: + def table_key_to_table(self) -> Dict[str, Table]: """Return an aggregate of the :attr:`.MetaData.tables` dictionaries. The :attr:`.MetaData.tables` collection is a dictionary of table key @@ -248,13 +250,15 @@ class AutogenContext: class RevisionContext: """Maintains configuration and state that's specific to a revision file generation operation.""" - def __init__(self, config: Config, script_directory: ScriptDirectory, command_args: Dict[str, Any], process_revision_directives: Optional[Callable] = ...) -> None: + generated_revisions: List[MigrationScript] + process_revision_directives: Optional[ProcessRevisionDirectiveFn] + def __init__(self, config: Config, script_directory: ScriptDirectory, command_args: Dict[str, Any], process_revision_directives: Optional[ProcessRevisionDirectiveFn] = ...) -> None: ... - def run_autogenerate(self, rev: tuple, migration_context: MigrationContext) -> None: + def run_autogenerate(self, rev: _GetRevArg, migration_context: MigrationContext) -> None: ... - def run_no_autogenerate(self, rev: tuple, migration_context: MigrationContext) -> None: + def run_no_autogenerate(self, rev: _GetRevArg, migration_context: MigrationContext) -> None: ... def generate_scripts(self) -> Iterator[Optional[Script]]: diff --git a/src/database/typings/alembic/autogenerate/compare.pyi b/src/database/typings/alembic/autogenerate/compare.pyi new file mode 100644 index 00000000..9e5668cf --- /dev/null +++ b/src/database/typings/alembic/autogenerate/compare.pyi @@ -0,0 +1,13 @@ +""" +This type stub file was generated by pyright. +""" + +from typing import Any, Mapping, TYPE_CHECKING, TypeVar, Union +from sqlalchemy.sql.schema import ForeignKeyConstraint, Index, UniqueConstraint + +if TYPE_CHECKING: + ... +log = ... +comparators = ... +_IndexColumnSortingOps: Mapping[str, Any] = ... +_C = TypeVar("_C", bound=Union[UniqueConstraint, ForeignKeyConstraint, Index]) diff --git a/typings/alembic/autogenerate/render.pyi b/src/database/typings/alembic/autogenerate/render.pyi similarity index 100% rename from typings/alembic/autogenerate/render.pyi rename to src/database/typings/alembic/autogenerate/render.pyi diff --git a/typings/alembic/autogenerate/rewriter.pyi b/src/database/typings/alembic/autogenerate/rewriter.pyi similarity index 76% rename from typings/alembic/autogenerate/rewriter.pyi rename to src/database/typings/alembic/autogenerate/rewriter.pyi index 0936ce63..ed4efa76 100644 --- a/typings/alembic/autogenerate/rewriter.pyi +++ b/src/database/typings/alembic/autogenerate/rewriter.pyi @@ -2,13 +2,14 @@ This type stub file was generated by pyright. """ -from typing import Callable, List, Optional, TYPE_CHECKING, Type, Union -from alembic.operations.ops import AddColumnOp, AlterColumnOp, CreateTableOp, MigrateOperation, MigrationScript, ModifyTableOps -from alembic.runtime.migration import MigrationContext -from alembic.script.revision import Revision +from typing import Any, Callable, List, TYPE_CHECKING, Tuple, Type, Union +from ..operations.ops import AddColumnOp, AlterColumnOp, CreateTableOp, MigrateOperation, MigrationScript, ModifyTableOps +from ..runtime.migration import MigrationContext +from ..script.revision import _GetRevArg if TYPE_CHECKING: ... +ProcessRevisionDirectiveFn = Callable[["MigrationContext", "_GetRevArg", List["MigrationScript"]], None] class Rewriter: """A helper object that allows easy 'rewriting' of ops streams. 
@@ -35,14 +36,14 @@ class Rewriter: """ _traverse = ... - _chained: Optional[Rewriter] = ... + _chained: Tuple[Union[ProcessRevisionDirectiveFn, Rewriter], ...] = ... def __init__(self) -> None: ... - def chain(self, other: Rewriter) -> Rewriter: + def chain(self, other: Union[ProcessRevisionDirectiveFn, Rewriter,]) -> Rewriter: """Produce a "chain" of this :class:`.Rewriter` to another. - This allows two rewriters to operate serially on a stream, + This allows two or more rewriters to operate serially on a stream, e.g.:: writer1 = autogenerate.Rewriter() @@ -71,7 +72,7 @@ class Rewriter: """ ... - def rewrites(self, operator: Union[Type[AddColumnOp], Type[MigrateOperation], Type[AlterColumnOp], Type[CreateTableOp], Type[ModifyTableOps],]) -> Callable: + def rewrites(self, operator: Union[Type[AddColumnOp], Type[MigrateOperation], Type[AlterColumnOp], Type[CreateTableOp], Type[ModifyTableOps],]) -> Callable[..., Any]: """Register a function as rewriter for a given type. The function should receive three arguments, which are @@ -86,10 +87,10 @@ class Rewriter: """ ... - def __call__(self, context: MigrationContext, revision: Revision, directives: List[MigrationScript]) -> None: + def __call__(self, context: MigrationContext, revision: _GetRevArg, directives: List[MigrationScript]) -> None: ... - def process_revision_directives(self, context: MigrationContext, revision: Revision, directives: List[MigrationScript]) -> None: + def process_revision_directives(self, context: MigrationContext, revision: _GetRevArg, directives: List[MigrationScript]) -> None: ... diff --git a/typings/alembic/command.pyi b/src/database/typings/alembic/command.pyi similarity index 89% rename from typings/alembic/command.pyi rename to src/database/typings/alembic/command.pyi index a7fa7a1d..5bf17b66 100644 --- a/typings/alembic/command.pyi +++ b/src/database/typings/alembic/command.pyi @@ -6,10 +6,11 @@ from typing import List, Optional, TYPE_CHECKING, Union from .runtime.environment import ProcessRevisionDirectiveFn from alembic.config import Config from alembic.script.base import Script +from alembic.script.revision import _RevIdType if TYPE_CHECKING: ... -def list_templates(config: Config): # -> None: +def list_templates(config: Config) -> None: """List available templates. :param config: a :class:`.Config` object. @@ -30,13 +31,10 @@ def init(config: Config, directory: str, template: str = ..., package: bool = .. :param package: when True, write ``__init__.py`` files into the environment location as well as the versions/ location. - .. versionadded:: 1.2 - - """ ... -def revision(config: Config, message: Optional[str] = ..., autogenerate: bool = ..., sql: bool = ..., head: str = ..., splice: bool = ..., branch_label: Optional[str] = ..., version_path: Optional[str] = ..., rev_id: Optional[str] = ..., depends_on: Optional[str] = ..., process_revision_directives: Optional[ProcessRevisionDirectiveFn] = ...) -> Union[Optional[Script], List[Optional[Script]]]: +def revision(config: Config, message: Optional[str] = ..., autogenerate: bool = ..., sql: bool = ..., head: str = ..., splice: bool = ..., branch_label: Optional[_RevIdType] = ..., version_path: Optional[str] = ..., rev_id: Optional[str] = ..., depends_on: Optional[str] = ..., process_revision_directives: Optional[ProcessRevisionDirectiveFn] = ...) -> Union[Optional[Script], List[Optional[Script]]]: """Create a new revision file. :param config: a :class:`.Config` object. @@ -93,7 +91,7 @@ def check(config: Config) -> None: """ ... 
-def merge(config: Config, revisions: str, message: Optional[str] = ..., branch_label: Optional[str] = ..., rev_id: Optional[str] = ...) -> Optional[Script]: +def merge(config: Config, revisions: _RevIdType, message: Optional[str] = ..., branch_label: Optional[_RevIdType] = ..., rev_id: Optional[str] = ...) -> Optional[Script]: """Merge two revisions together. Creates a new migration file. :param config: a :class:`.Config` instance @@ -200,7 +198,7 @@ def current(config: Config, verbose: bool = ...) -> None: """ ... -def stamp(config: Config, revision: str, sql: bool = ..., tag: Optional[str] = ..., purge: bool = ...) -> None: +def stamp(config: Config, revision: _RevIdType, sql: bool = ..., tag: Optional[str] = ..., purge: bool = ...) -> None: """'stamp' the revision table with the given revision; don't run any migrations. @@ -212,9 +210,6 @@ def stamp(config: Config, revision: str, sql: bool = ..., tag: Optional[str] = . .. note:: this parameter is called "revisions" in the command line interface. - .. versionchanged:: 1.2 The revision may be a single revision or - list of revisions when stamping multiple branch heads. - :param sql: use ``--sql`` mode :param tag: an arbitrary "tag" that can be intercepted by custom @@ -223,8 +218,6 @@ def stamp(config: Config, revision: str, sql: bool = ..., tag: Optional[str] = . :param purge: delete all entries in the version table before stamping. - .. versionadded:: 1.2 - """ ... diff --git a/typings/alembic/config.pyi b/src/database/typings/alembic/config.pyi similarity index 93% rename from typings/alembic/config.pyi rename to src/database/typings/alembic/config.pyi index a986a131..e0ae27da 100644 --- a/typings/alembic/config.pyi +++ b/src/database/typings/alembic/config.pyi @@ -2,10 +2,10 @@ This type stub file was generated by pyright. """ -from configparser import ConfigParser import os from argparse import Namespace -from typing import Any, Dict, Mapping, Optional, TextIO, Union, overload +from configparser import ConfigParser +from typing import Any, Dict, Mapping, Optional, Sequence, TextIO, Union, overload from typing_extensions import TypedDict from . import util @@ -20,7 +20,7 @@ class Config: some_param = context.config.get_main_option("my option") - When invoking Alembic programatically, a new + When invoking Alembic programmatically, a new :class:`.Config` can be created by passing the name of an .ini file to the constructor:: @@ -81,7 +81,7 @@ class Config: :ref:`connection_sharing` """ - def __init__(self, file_: Union[str, os.PathLike[str], None] = ..., ini_section: str = ..., output_buffer: Optional[TextIO] = ..., stdout: TextIO = ..., cmd_opts: Optional[Namespace] = ..., config_args: Mapping[str, Any] = ..., attributes: Optional[dict] = ...) -> None: + def __init__(self, file_: Union[str, os.PathLike[str], None] = ..., ini_section: str = ..., output_buffer: Optional[TextIO] = ..., stdout: TextIO = ..., cmd_opts: Optional[Namespace] = ..., config_args: Mapping[str, Any] = ..., attributes: Optional[Dict[str, Any]] = ...) -> None: """Construct a new :class:`.Config`""" ... @@ -89,7 +89,7 @@ class Config: config_file_name: Union[str, os.PathLike[str], None] = ... config_ini_section: str = ... @util.memoized_property - def attributes(self): -> dict[Any, Any]: + def attributes(self) -> Dict[str, Any]: """A Python dictionary for storage of additional state. @@ -108,7 +108,7 @@ class Config: """ ... 
- def print_stdout(self, text: str, *arg) -> None: + def print_stdout(self, text: str, *arg: Any) -> None: """Render a message to standard out. When :meth:`.Config.print_stdout` is called with additional args @@ -221,7 +221,7 @@ class Config: def get_main_option(self, name: str, default: Optional[str] = ...) -> Optional[str]: ... - def get_main_option(self, name, default=...): # -> str | None: + def get_main_option(self, name: str, default: Optional[str] = ...) -> Optional[str]: """Return an option from the 'main' section of the .ini file. This defaults to being a key from the ``[alembic]`` @@ -250,12 +250,12 @@ class CommandLine: def run_cmd(self, config: Config, options: Namespace) -> None: ... - def main(self, argv=...): # -> None: + def main(self, argv: Optional[Sequence[str]] = ...) -> None: ... -def main(argv=..., prog=..., **kwargs): # -> None: +def main(argv: Optional[Sequence[str]] = ..., prog: Optional[str] = ..., **kwargs: Any) -> None: """The console runner function for Alembic.""" ... diff --git a/typings/alembic/context.pyi b/src/database/typings/alembic/context.pyi similarity index 87% rename from typings/alembic/context.pyi rename to src/database/typings/alembic/context.pyi index b6404878..5f74e561 100644 --- a/typings/alembic/context.pyi +++ b/src/database/typings/alembic/context.pyi @@ -2,38 +2,20 @@ This type stub file was generated by pyright. """ -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Collection, - ContextManager, - Dict, - List, - Literal, - Mapping, - MutableMapping, - Optional, - TextIO, - Tuple, - Union, - overload, -) - -from sqlalchemy import Table +from typing import Any, Callable, Collection, ContextManager, Dict, Iterable, List, Literal, Mapping, MutableMapping, Optional, Sequence, TYPE_CHECKING, TextIO, Tuple, Union, overload from sqlalchemy.engine.base import Connection from sqlalchemy.engine.url import URL -from sqlalchemy.sql.elements import ClauseElement +from sqlalchemy.sql import Executable from sqlalchemy.sql.schema import Column, FetchedValue, MetaData, SchemaItem - +from sqlalchemy.sql.type_api import TypeEngine from .autogenerate.api import AutogenContext from .config import Config -from .operations.ops import MigrateOperation +from .operations.ops import MigrationScript from .runtime.migration import MigrationContext, MigrationInfo, _ProxyTransaction from .script import ScriptDirectory -if TYPE_CHECKING: ... - +if TYPE_CHECKING: + ... def begin_transaction() -> Union[_ProxyTransaction, ContextManager[None]]: """Return a context manager that will enclose an operation within a "transaction", @@ -80,102 +62,7 @@ def begin_transaction() -> Union[_ProxyTransaction, ContextManager[None]]: ... 
config: Config - -def configure( - connection: Optional[Connection] = ..., - url: Union[str, URL, None] = ..., - dialect_name: Optional[str] = ..., - dialect_opts: Optional[Dict[str, Any]] = ..., - transactional_ddl: Optional[bool] = ..., - transaction_per_migration: bool = ..., - output_buffer: Optional[TextIO] = ..., - starting_rev: Optional[str] = ..., - tag: Optional[str] = ..., - template_args: Optional[Dict[str, Any]] = ..., - render_as_batch: bool = ..., - # Alembic documents and supports list[MetaData] - # despite the typehint not including it in the - # library - target_metadata: Optional[MetaData | list[MetaData]] = ..., - include_name: Optional[ - Callable[ - [ - Optional[str], - Literal[ - "schema", - "table", - "column", - "index", - "unique_constraint", - "foreign_key_constraint", - ], - MutableMapping[ - Literal[ - "schema_name", - "table_name", - "schema_qualified_table_name", - ], - Optional[str], - ], - ], - bool, - ] - ] = ..., - include_object: Optional[ - Callable[ - [ - Table, - str, - Literal[ - "schema", - "table", - "column", - "index", - "unique_constraint", - "foreign_key_constraint", - ], - bool, - Optional[SchemaItem], - ], - bool, - ] - ] = ..., - include_schemas: bool = ..., - process_revision_directives: Optional[ - Callable[[MigrationContext, Tuple[str, str], List[MigrateOperation]], None] - ] = ..., - compare_type: bool = ..., - compare_server_default: Union[ - bool, - Callable[ - [ - MigrationContext, - Column[Any], - Column[Any], - Optional[str], - Optional[FetchedValue], - Optional[str], - ], - Optional[bool], - ], - ] = ..., - render_item: Optional[ - Callable[[str, Any, AutogenContext], Union[str, Literal[False]]] - ] = ..., - literal_binds: bool = ..., - upgrade_token: str = ..., - downgrade_token: str = ..., - alembic_module_prefix: str = ..., - sqlalchemy_module_prefix: str = ..., - user_module_prefix: Optional[str] = ..., - on_version_apply: Optional[ - Callable[ - [MigrationContext, MigrationInfo, Collection[Any], Mapping[str, Any]], - None, - ] - ] = ..., - **kw: Any -) -> None: +def configure(connection: Optional[Connection] = ..., url: Union[str, URL, None] = ..., dialect_name: Optional[str] = ..., dialect_opts: Optional[Dict[str, Any]] = ..., transactional_ddl: Optional[bool] = ..., transaction_per_migration: bool = ..., output_buffer: Optional[TextIO] = ..., starting_rev: Optional[str] = ..., tag: Optional[str] = ..., template_args: Optional[Dict[str, Any]] = ..., render_as_batch: bool = ..., target_metadata: Union[MetaData, Sequence[MetaData], None] = ..., include_name: Optional[Callable[[Optional[str], Literal["schema", "table", "column", "index", "unique_constraint", "foreign_key_constraint",], MutableMapping[Literal["schema_name", "table_name", "schema_qualified_table_name",], Optional[str],]], bool,]] = ..., include_object: Optional[Callable[[SchemaItem, Optional[str], Literal["schema", "table", "column", "index", "unique_constraint", "foreign_key_constraint",], bool, Optional[SchemaItem]], bool,]] = ..., include_schemas: bool = ..., process_revision_directives: Optional[Callable[[MigrationContext, Union[str, Iterable[Optional[str]], Iterable[str]], List[MigrationScript]], None,]] = ..., compare_type: Union[bool, Callable[[MigrationContext, Column[Any], Column[Any], TypeEngine[Any], TypeEngine[Any]], Optional[bool],],] = ..., compare_server_default: Union[bool, Callable[[MigrationContext, Column[Any], Column[Any], Optional[str], Optional[FetchedValue], Optional[str]], Optional[bool],],] = ..., render_item: Optional[Callable[[str, Any, 
AutogenContext], Union[str, Literal[False]]]] = ..., literal_binds: bool = ..., upgrade_token: str = ..., downgrade_token: str = ..., alembic_module_prefix: str = ..., sqlalchemy_module_prefix: str = ..., user_module_prefix: Optional[str] = ..., on_version_apply: Optional[Callable[[MigrationContext, MigrationInfo, Collection[Any], Mapping[str, Any]], None,]] = ..., **kw: Any) -> None: """Configure a :class:`.MigrationContext` within this :class:`.EnvironmentContext` which will provide database connectivity and other configuration to a series of @@ -220,9 +107,6 @@ def configure( ``connection`` and ``url`` are not passed. :param dialect_opts: dictionary of options to be passed to dialect constructor. - - .. versionadded:: 1.0.12 - :param transactional_ddl: Force the usage of "transactional" DDL on or off; this otherwise defaults to whether or not the dialect in @@ -305,12 +189,16 @@ def configure( to produce candidate upgrade/downgrade operations. :param compare_type: Indicates type comparison behavior during an autogenerate - operation. Defaults to ``False`` which disables type - comparison. Set to - ``True`` to turn on default type comparison, which has varied - accuracy depending on backend. See :ref:`compare_types` + operation. Defaults to ``True`` turning on type comparison, which + has good accuracy on most backends. See :ref:`compare_types` for an example as well as information on other type - comparison options. + comparison options. Set to ``False`` which disables type + comparison. A callable can also be passed to provide custom type + comparison, see :ref:`compare_types` for additional details. + + .. versionchanged:: 1.12.0 The default value of + :paramref:`.EnvironmentContext.configure.compare_type` has been + changed to ``True``. .. seealso:: @@ -394,8 +282,6 @@ def configure( include_name = include_name ) - .. versionadded:: 1.5 - .. seealso:: :ref:`autogenerate_include_hooks` @@ -611,9 +497,7 @@ def configure( """ ... -def execute( - sql: Union[ClauseElement, str], execution_options: Optional[dict] = ... -) -> None: +def execute(sql: Union[Executable, str], execution_options: Optional[Dict[str, Any]] = ...) -> None: """Execute the given SQL using the current change context. The behavior of :meth:`.execute` is the same @@ -727,9 +611,13 @@ def get_tag_argument() -> Optional[str]: ... @overload -def get_x_argument(as_dictionary: Literal[False]) -> List[str]: ... +def get_x_argument(as_dictionary: Literal[False]) -> List[str]: + ... + @overload -def get_x_argument(as_dictionary: Literal[True]) -> Dict[str, str]: ... +def get_x_argument(as_dictionary: Literal[True]) -> Dict[str, str]: + ... + @overload def get_x_argument(as_dictionary: bool = ...) -> Union[List[str], Dict[str, str]]: """Return the value(s) passed for the ``-x`` argument, if any. @@ -741,7 +629,11 @@ def get_x_argument(as_dictionary: bool = ...) -> Union[List[str], Dict[str, str] The return value is a list, returned directly from the ``argparse`` structure. If ``as_dictionary=True`` is passed, the ``x`` arguments are parsed using ``key=value`` format into a dictionary that is - then returned. + then returned. If there is no ``=`` in the argument, value is an empty + string. + + .. versionchanged:: 1.13.1 Support ``as_dictionary=True`` when + arguments are passed without the ``=`` symbol. For example, to support passing a database URL on the command line, the standard ``env.py`` script can be modified like this:: @@ -785,7 +677,7 @@ def is_offline_mode() -> bool: """ ... 
-def is_transactional_ddl(): +def is_transactional_ddl() -> bool: """Return True if the context is configured to expect a transactional DDL capable backend. @@ -822,7 +714,6 @@ def run_migrations(**kw: Any) -> None: ... script: ScriptDirectory - def static_output(text: str) -> None: """Emit text directly to the "offline" SQL stream. @@ -833,3 +724,4 @@ def static_output(text: str) -> None: """ ... + diff --git a/typings/alembic/ddl/__init__.pyi b/src/database/typings/alembic/ddl/__init__.pyi similarity index 71% rename from typings/alembic/ddl/__init__.pyi rename to src/database/typings/alembic/ddl/__init__.pyi index 6cc9d0d4..490cfb62 100644 --- a/typings/alembic/ddl/__init__.pyi +++ b/src/database/typings/alembic/ddl/__init__.pyi @@ -3,5 +3,5 @@ This type stub file was generated by pyright. """ from . import mssql, mysql, oracle, postgresql, sqlite -from .impl import DefaultImpl +from .impl import DefaultImpl as DefaultImpl diff --git a/src/database/typings/alembic/ddl/_autogen.pyi b/src/database/typings/alembic/ddl/_autogen.pyi new file mode 100644 index 00000000..a9ada48a --- /dev/null +++ b/src/database/typings/alembic/ddl/_autogen.pyi @@ -0,0 +1,162 @@ +""" +This type stub file was generated by pyright. +""" + +from typing import Any, ClassVar, Dict, Generic, Literal, NamedTuple, Optional, Sequence, TYPE_CHECKING, Tuple, Type, TypeVar, Union +from sqlalchemy.sql.schema import Constraint, ForeignKeyConstraint, Index, UniqueConstraint +from typing_extensions import TypeGuard +from .. import util +from ..util import sqla_compat +from alembic.autogenerate.api import AutogenContext +from alembic.ddl.impl import DefaultImpl + +if TYPE_CHECKING: + ... +CompareConstraintType = Union[Constraint, Index] +_C = TypeVar("_C", bound=CompareConstraintType) +_clsreg: Dict[str, Type[_constraint_sig]] = ... +class ComparisonResult(NamedTuple): + status: Literal["equal", "different", "skip"] + message: str + @property + def is_equal(self) -> bool: + ... + + @property + def is_different(self) -> bool: + ... + + @property + def is_skip(self) -> bool: + ... + + @classmethod + def Equal(cls) -> ComparisonResult: + """the constraints are equal.""" + ... + + @classmethod + def Different(cls, reason: Union[str, Sequence[str]]) -> ComparisonResult: + """the constraints are different for the provided reason(s).""" + ... + + @classmethod + def Skip(cls, reason: Union[str, Sequence[str]]) -> ComparisonResult: + """the constraint cannot be compared for the provided reason(s). + + The message is logged, but the constraints will be otherwise + considered equal, meaning that no migration command will be + generated. + """ + ... + + + +class _constraint_sig(Generic[_C]): + const: _C + _sig: Tuple[Any, ...] + name: Optional[sqla_compat._ConstraintNameDefined] + impl: DefaultImpl + _is_index: ClassVar[bool] = ... + _is_fk: ClassVar[bool] = ... + _is_uq: ClassVar[bool] = ... + _is_metadata: bool + def __init_subclass__(cls) -> None: + ... + + def __init__(self, is_metadata: bool, impl: DefaultImpl, const: _C) -> None: + ... + + def compare_to_reflected(self, other: _constraint_sig[Any]) -> ComparisonResult: + ... + + @classmethod + def from_constraint(cls, is_metadata: bool, impl: DefaultImpl, constraint: _C) -> _constraint_sig[_C]: + ... + + def md_name_to_sql_name(self, context: AutogenContext) -> Optional[str]: + ... + + @util.memoized_property + def is_named(self): # -> bool: + ... + + @util.memoized_property + def unnamed(self) -> Tuple[Any, ...]: + ... 
+ + @util.memoized_property + def unnamed_no_options(self) -> Tuple[Any, ...]: + ... + + def __eq__(self, other) -> bool: + ... + + def __ne__(self, other) -> bool: + ... + + def __hash__(self) -> int: + ... + + + +class _uq_constraint_sig(_constraint_sig[UniqueConstraint]): + _is_uq = ... + is_unique = ... + def __init__(self, is_metadata: bool, impl: DefaultImpl, const: UniqueConstraint) -> None: + ... + + @property + def column_names(self) -> Tuple[str, ...]: + ... + + + +class _ix_constraint_sig(_constraint_sig[Index]): + _is_index = ... + name: sqla_compat._ConstraintName + def __init__(self, is_metadata: bool, impl: DefaultImpl, const: Index) -> None: + ... + + @util.memoized_property + def has_expressions(self): # -> bool: + ... + + @util.memoized_property + def column_names(self) -> Tuple[str, ...]: + ... + + @util.memoized_property + def column_names_optional(self) -> Tuple[Optional[str], ...]: + ... + + @util.memoized_property + def is_named(self): # -> Literal[True]: + ... + + @util.memoized_property + def unnamed(self): # -> tuple[bool, *tuple[str | None, ...]]: + ... + + + +class _fk_constraint_sig(_constraint_sig[ForeignKeyConstraint]): + _is_fk = ... + def __init__(self, is_metadata: bool, impl: DefaultImpl, const: ForeignKeyConstraint) -> None: + ... + + @util.memoized_property + def unnamed_no_options(self): # -> tuple[Any, Any, tuple[Any, ...], Any, Any, tuple[Any, ...]]: + ... + + + +def is_index_sig(sig: _constraint_sig) -> TypeGuard[_ix_constraint_sig]: + ... + +def is_uq_sig(sig: _constraint_sig) -> TypeGuard[_uq_constraint_sig]: + ... + +def is_fk_sig(sig: _constraint_sig) -> TypeGuard[_fk_constraint_sig]: + ... + diff --git a/typings/alembic/ddl/base.pyi b/src/database/typings/alembic/ddl/base.pyi similarity index 100% rename from typings/alembic/ddl/base.pyi rename to src/database/typings/alembic/ddl/base.pyi diff --git a/typings/alembic/ddl/impl.pyi b/src/database/typings/alembic/ddl/impl.pyi similarity index 81% rename from typings/alembic/ddl/impl.pyi rename to src/database/typings/alembic/ddl/impl.pyi index fc2fca8d..7579c7ec 100644 --- a/typings/alembic/ddl/impl.pyi +++ b/src/database/typings/alembic/ddl/impl.pyi @@ -2,9 +2,11 @@ This type stub file was generated by pyright. """ -from typing import Any, Dict, List, Literal, Optional, Sequence, Set, TYPE_CHECKING, TextIO, Tuple, Type, Union +from typing import Any, Dict, List, Literal, NamedTuple, Optional, Sequence, Set, TYPE_CHECKING, TextIO, Tuple, Type, Union +from ._autogen import ComparisonResult as ComparisonResult from sqlalchemy.engine import Connection, Dialect -from sqlalchemy.sql.elements import ClauseElement, ColumnElement, quoted_name +from sqlalchemy.sql import ClauseElement, Executable +from sqlalchemy.sql.elements import ColumnElement, quoted_name from sqlalchemy.sql.schema import Column, Constraint, ForeignKeyConstraint, Index, Table, UniqueConstraint from sqlalchemy.sql.selectable import TableClause from sqlalchemy.sql.type_api import TypeEngine @@ -14,6 +16,7 @@ from ..operations.batch import ApplyBatchImpl, BatchOperationsImpl if TYPE_CHECKING: ... +log = ... class ImplMeta(type): def __init__(cls, classname: str, bases: Tuple[Type[DefaultImpl]], dict_: Dict[str, Any]) -> None: ... @@ -21,7 +24,6 @@ class ImplMeta(type): _impls: Dict[str, Type[DefaultImpl]] = ... -Params = ... class DefaultImpl(metaclass=ImplMeta): """Provide the entrypoint for major migration operations, including database-specific behavioral variances. 
@@ -75,7 +77,7 @@ class DefaultImpl(metaclass=ImplMeta): def bind(self) -> Optional[Connection]: ... - def execute(self, sql: Union[ClauseElement, str], execution_options: Optional[dict[str, Any]] = ...) -> None: + def execute(self, sql: Union[Executable, str], execution_options: Optional[dict[str, Any]] = ...) -> None: ... def alter_column(self, table_name: str, column_name: str, nullable: Optional[bool] = ..., server_default: Union[_ServerDefault, Literal[False]] = ..., name: Optional[str] = ..., type_: Optional[TypeEngine] = ..., schema: Optional[str] = ..., autoincrement: Optional[bool] = ..., comment: Optional[Union[str, Literal[False]]] = ..., existing_comment: Optional[str] = ..., existing_type: Optional[TypeEngine] = ..., existing_server_default: Optional[_ServerDefault] = ..., existing_nullable: Optional[bool] = ..., existing_autoincrement: Optional[bool] = ..., **kw: Any) -> None: @@ -102,7 +104,7 @@ class DefaultImpl(metaclass=ImplMeta): def drop_table(self, table: Table) -> None: ... - def create_index(self, index: Index) -> None: + def create_index(self, index: Index, **kw: Any) -> None: ... def create_table_comment(self, table: Table) -> None: @@ -114,7 +116,7 @@ class DefaultImpl(metaclass=ImplMeta): def create_column_comment(self, column: ColumnElement[Any]) -> None: ... - def drop_index(self, index: Index) -> None: + def drop_index(self, index: Index, **kw: Any) -> None: ... def bulk_insert(self, table: Union[TableClause, Table], rows: List[dict], multiinsert: bool = ...) -> None: @@ -140,8 +142,6 @@ class DefaultImpl(metaclass=ImplMeta): """Render a SQL expression that is typically a server default, index expression, etc. - .. versionadded:: 1.0.11 - """ ... @@ -190,8 +190,34 @@ class DefaultImpl(metaclass=ImplMeta): def render_type(self, type_obj: TypeEngine, autogen_context: AutogenContext) -> Union[str, Literal[False]]: ... - def create_index_sig(self, index: Index) -> Tuple[Any, ...]: + def compare_indexes(self, metadata_index: Index, reflected_index: Index) -> ComparisonResult: + """Compare two indexes by comparing the signature generated by + ``create_index_sig``. + + This method returns a ``ComparisonResult``. + """ ... + def compare_unique_constraint(self, metadata_constraint: UniqueConstraint, reflected_constraint: UniqueConstraint) -> ComparisonResult: + """Compare two unique constraints by comparing the two signatures. + + The arguments are two tuples that contain the unique constraint and + the signatures generated by ``create_unique_constraint_sig``. + + This method returns a ``ComparisonResult``. + """ + ... + + def adjust_reflected_dialect_options(self, reflected_object: Dict[str, Any], kind: str) -> Dict[str, Any]: + ... + + + +class Params(NamedTuple): + token0: str + tokens: List[str] + args: List[str] + kwargs: Dict[str, str] + ... diff --git a/typings/alembic/ddl/mssql.pyi b/src/database/typings/alembic/ddl/mssql.pyi similarity index 90% rename from typings/alembic/ddl/mssql.pyi rename to src/database/typings/alembic/ddl/mssql.pyi index cd43ddfa..76c040bb 100644 --- a/typings/alembic/ddl/mssql.pyi +++ b/src/database/typings/alembic/ddl/mssql.pyi @@ -2,13 +2,13 @@ This type stub file was generated by pyright. 
""" -from typing import Any, List, Literal, Optional, TYPE_CHECKING, Union -from sqlalchemy.ext.compiler import compiles +from typing import Any, Dict, List, Literal, Optional, TYPE_CHECKING, Union from sqlalchemy.schema import Column from sqlalchemy.sql.base import Executable from sqlalchemy.sql.elements import ClauseElement from .base import AddColumn, ColumnDefault, ColumnName, ColumnNullable, ColumnType, RenameTable, _ServerDefault from .impl import DefaultImpl +from ..util.sqla_compat import compiles from sqlalchemy.dialects.mssql.base import MSDDLCompiler from sqlalchemy.sql.schema import Index, Table from sqlalchemy.sql.selectable import TableClause @@ -34,7 +34,7 @@ class MSSQLImpl(DefaultImpl): def alter_column(self, table_name: str, column_name: str, nullable: Optional[bool] = ..., server_default: Optional[Union[_ServerDefault, Literal[False]]] = ..., name: Optional[str] = ..., type_: Optional[TypeEngine] = ..., schema: Optional[str] = ..., existing_type: Optional[TypeEngine] = ..., existing_server_default: Optional[_ServerDefault] = ..., existing_nullable: Optional[bool] = ..., **kw: Any) -> None: ... - def create_index(self, index: Index) -> None: + def create_index(self, index: Index, **kw: Any) -> None: ... def bulk_insert(self, table: Union[TableClause, Table], rows: List[dict], **kw: Any) -> None: @@ -46,6 +46,9 @@ class MSSQLImpl(DefaultImpl): def compare_server_default(self, inspector_column, metadata_column, rendered_metadata_default, rendered_inspector_default): # -> bool: ... + def adjust_reflected_dialect_options(self, reflected_object: Dict[str, Any], kind: str) -> Dict[str, Any]: + ... + class _ExecDropConstraint(Executable, ClauseElement): diff --git a/typings/alembic/ddl/mysql.pyi b/src/database/typings/alembic/ddl/mysql.pyi similarity index 100% rename from typings/alembic/ddl/mysql.pyi rename to src/database/typings/alembic/ddl/mysql.pyi diff --git a/typings/alembic/ddl/oracle.pyi b/src/database/typings/alembic/ddl/oracle.pyi similarity index 97% rename from typings/alembic/ddl/oracle.pyi rename to src/database/typings/alembic/ddl/oracle.pyi index 67f54112..d29e2b5b 100644 --- a/typings/alembic/ddl/oracle.pyi +++ b/src/database/typings/alembic/ddl/oracle.pyi @@ -3,9 +3,9 @@ This type stub file was generated by pyright. """ from typing import Any, TYPE_CHECKING -from sqlalchemy.ext.compiler import compiles from .base import AddColumn, ColumnComment, ColumnDefault, ColumnName, ColumnNullable, ColumnType, IdentityColumnDefault, RenameTable from .impl import DefaultImpl +from ..util.sqla_compat import compiles from sqlalchemy.dialects.oracle.base import OracleDDLCompiler from sqlalchemy.sql.schema import Column diff --git a/typings/alembic/ddl/postgresql.pyi b/src/database/typings/alembic/ddl/postgresql.pyi similarity index 79% rename from typings/alembic/ddl/postgresql.pyi rename to src/database/typings/alembic/ddl/postgresql.pyi index cc0b228d..99b93987 100644 --- a/typings/alembic/ddl/postgresql.pyi +++ b/src/database/typings/alembic/ddl/postgresql.pyi @@ -2,15 +2,16 @@ This type stub file was generated by pyright. 
""" -from typing import Any, Literal, Optional, Sequence, TYPE_CHECKING, Tuple, Union -from sqlalchemy import Index +from typing import Any, Dict, Literal, Optional, Sequence, TYPE_CHECKING, Tuple, Union +from sqlalchemy import Index, UniqueConstraint from sqlalchemy.dialects.postgresql import ExcludeConstraint -from sqlalchemy.sql.elements import BinaryExpression, ColumnClause, quoted_name -from .base import AlterColumn, ColumnComment, IdentityColumnDefault, RenameTable, _ServerDefault, compiles -from .impl import DefaultImpl +from sqlalchemy.sql.elements import ClauseElement, ColumnClause, ColumnElement, quoted_name +from .base import AlterColumn, ColumnComment, IdentityColumnDefault, RenameTable, _ServerDefault +from .impl import ComparisonResult, DefaultImpl from ..operations import ops from ..operations.base import BatchOperations, Operations from ..util import sqla_compat +from ..util.sqla_compat import compiles from sqlalchemy.dialects.postgresql.base import PGDDLCompiler from sqlalchemy.sql.schema import Table from sqlalchemy.sql.type_api import TypeEngine @@ -24,8 +25,7 @@ class PostgresqlImpl(DefaultImpl): __dialect__ = ... transactional_ddl = ... type_synonyms = ... - identity_attrs_ignore = ... - def create_index(self, index): # -> None: + def create_index(self, index: Index, **kw: Any) -> None: ... def prep_table_for_batch(self, batch_impl, table): # -> None: @@ -43,7 +43,21 @@ class PostgresqlImpl(DefaultImpl): def correct_for_autogen_constraints(self, conn_unique_constraints, conn_indexes, metadata_unique_constraints, metadata_indexes): # -> None: ... - def create_index_sig(self, index: Index) -> Tuple[Any, ...]: + _default_modifiers_re = ... + def compare_indexes(self, metadata_index: Index, reflected_index: Index) -> ComparisonResult: + ... + + def compare_unique_constraint(self, metadata_constraint: UniqueConstraint, reflected_constraint: UniqueConstraint) -> ComparisonResult: + ... + + def adjust_reflected_dialect_options(self, reflected_options: Dict[str, Any], kind: str) -> Dict[str, Any]: + ... + + def render_ddl_sql_expr(self, expr: ClauseElement, is_server_default: bool = ..., is_index: bool = ..., **kw: Any) -> str: + """Render a SQL expression that is typically a server default, + index expression, etc. + + """ ... def render_type(self, type_: TypeEngine, autogen_context: AutogenContext) -> Union[str, Literal[False]]: @@ -79,7 +93,7 @@ def visit_identity_column(element: IdentityColumnDefault, compiler: PGDDLCompile class CreateExcludeConstraintOp(ops.AddConstraintOp): """Represent a create exclude constraint operation.""" constraint_type = ... - def __init__(self, constraint_name: sqla_compat._ConstraintName, table_name: Union[str, quoted_name], elements: Union[Sequence[Tuple[str, str]], Sequence[Tuple[ColumnClause[Any], str]],], where: Optional[Union[BinaryExpression, str]] = ..., schema: Optional[str] = ..., _orig_constraint: Optional[ExcludeConstraint] = ..., **kw) -> None: + def __init__(self, constraint_name: sqla_compat._ConstraintName, table_name: Union[str, quoted_name], elements: Union[Sequence[Tuple[str, str]], Sequence[Tuple[ColumnClause[Any], str]],], where: Optional[Union[ColumnElement[bool], str]] = ..., schema: Optional[str] = ..., _orig_constraint: Optional[ExcludeConstraint] = ..., **kw) -> None: ... @classmethod @@ -128,7 +142,7 @@ class CreateExcludeConstraintOp(ops.AddConstraintOp): ... 
@classmethod - def batch_create_exclude_constraint(cls, operations: BatchOperations, constraint_name: str, *elements: Any, **kw: Any): # -> Any: + def batch_create_exclude_constraint(cls, operations: BatchOperations, constraint_name: str, *elements: Any, **kw: Any) -> Optional[Table]: """Issue a "create exclude constraint" instruction using the current batch migration context. diff --git a/typings/alembic/ddl/sqlite.pyi b/src/database/typings/alembic/ddl/sqlite.pyi similarity index 97% rename from typings/alembic/ddl/sqlite.pyi rename to src/database/typings/alembic/ddl/sqlite.pyi index 039835a2..1787b38b 100644 --- a/typings/alembic/ddl/sqlite.pyi +++ b/src/database/typings/alembic/ddl/sqlite.pyi @@ -3,9 +3,9 @@ This type stub file was generated by pyright. """ from typing import Any, Dict, Optional, TYPE_CHECKING, Union -from sqlalchemy.ext.compiler import compiles from .base import RenameTable from .impl import DefaultImpl +from ..util.sqla_compat import compiles from sqlalchemy.engine.reflection import Inspector from sqlalchemy.sql.compiler import DDLCompiler from sqlalchemy.sql.elements import Cast, ClauseElement diff --git a/typings/alembic/environment.pyi b/src/database/typings/alembic/environment.pyi similarity index 100% rename from typings/alembic/environment.pyi rename to src/database/typings/alembic/environment.pyi diff --git a/typings/alembic/migration.pyi b/src/database/typings/alembic/migration.pyi similarity index 100% rename from typings/alembic/migration.pyi rename to src/database/typings/alembic/migration.pyi diff --git a/typings/alembic/op.pyi b/src/database/typings/alembic/op.pyi similarity index 92% rename from typings/alembic/op.pyi rename to src/database/typings/alembic/op.pyi index 835d2a9d..75783e36 100644 --- a/typings/alembic/op.pyi +++ b/src/database/typings/alembic/op.pyi @@ -3,20 +3,23 @@ This type stub file was generated by pyright. """ from contextlib import contextmanager -from typing import Any, Awaitable, Callable, Dict, Iterator, List, Literal, Mapping, Optional, Sequence, TYPE_CHECKING, Tuple, Type, TypeVar, Union -from sqlalchemy.sql.expression import TableClause, Update +from typing import Any, Awaitable, Callable, Dict, Iterator, List, Literal, Mapping, Optional, Sequence, TYPE_CHECKING, Tuple, Type, TypeVar, Union, overload from sqlalchemy.engine import Connection -from sqlalchemy.sql.elements import BinaryExpression, TextClause, conv +from sqlalchemy.sql import Executable +from sqlalchemy.sql.elements import ColumnElement, TextClause, conv +from sqlalchemy.sql.expression import TableClause from sqlalchemy.sql.functions import Function from sqlalchemy.sql.schema import Column, Computed, Identity, SchemaItem, Table from sqlalchemy.sql.type_api import TypeEngine -from .operations.ops import BatchOperations, MigrateOperation +from .operations.base import BatchOperations +from .operations.ops import AddColumnOp, AddConstraintOp, AlterColumnOp, AlterTableOp, BulkInsertOp, CreateIndexOp, CreateTableCommentOp, CreateTableOp, DropColumnOp, DropConstraintOp, DropIndexOp, DropTableCommentOp, DropTableOp, ExecuteSQLOp, MigrateOperation from .runtime.migration import MigrationContext from .util.sqla_compat import _literal_bindparam if TYPE_CHECKING: ... _T = TypeVar("_T") +_C = TypeVar("_C", bound=Callable[..., Any]) def add_column(table_name: str, column: Column[Any], *, schema: Optional[str] = ...) -> None: """Issue an "add column" instruction using the current migration context. 
@@ -97,7 +100,7 @@ def add_column(table_name: str, column: Column[Any], *, schema: Optional[str] = """ ... -def alter_column(table_name: str, column_name: str, *, nullable: Optional[bool] = ..., comment: Union[str, Literal[False], None] = ..., server_default: Any = ..., new_column_name: Optional[str] = ..., type_: Union[TypeEngine, Type[TypeEngine], None] = ..., existing_type: Union[TypeEngine, Type[TypeEngine], None] = ..., existing_server_default: Union[str, bool, Identity, Computed, None] = ..., existing_nullable: Optional[bool] = ..., existing_comment: Optional[str] = ..., schema: Optional[str] = ..., **kw: Any) -> None: +def alter_column(table_name: str, column_name: str, *, nullable: Optional[bool] = ..., comment: Union[str, Literal[False], None] = ..., server_default: Any = ..., new_column_name: Optional[str] = ..., type_: Union[TypeEngine[Any], Type[TypeEngine[Any]], None] = ..., existing_type: Union[TypeEngine[Any], Type[TypeEngine[Any]], None] = ..., existing_server_default: Union[str, bool, Identity, Computed, None] = ..., existing_nullable: Optional[bool] = ..., existing_comment: Optional[str] = ..., schema: Optional[str] = ..., **kw: Any) -> None: r"""Issue an "alter column" instruction using the current migration context. @@ -136,9 +139,6 @@ def alter_column(table_name: str, column_name: str, *, nullable: Optional[bool] Set to ``None`` to have the default removed. :param comment: optional string text of a new comment to add to the column. - - .. versionadded:: 1.0.6 - :param new_column_name: Optional; specify a string name here to indicate the new name within a column rename operation. :param type\_: Optional; a :class:`~sqlalchemy.types.TypeEngine` @@ -155,7 +155,7 @@ def alter_column(table_name: str, column_name: str, *, nullable: Optional[bool] don't otherwise specify a new type, as well as for when nullability is being changed on a SQL Server column. It is also used if the type is a so-called - SQLlchemy "schema" type which may define a constraint (i.e. + SQLAlchemy "schema" type which may define a constraint (i.e. :class:`~sqlalchemy.types.Boolean`, :class:`~sqlalchemy.types.Enum`), so that the constraint can be dropped. @@ -172,9 +172,6 @@ def alter_column(table_name: str, column_name: str, *, nullable: Optional[bool] :param existing_comment: string text of the existing comment on the column to be maintained. Required on MySQL if the existing comment on the column is not being changed. - - .. versionadded:: 1.0.6 - :param schema: Optional schema name to operate within. To control quoting of the schema outside of the default behavior, use the SQLAlchemy construct @@ -189,7 +186,7 @@ def alter_column(table_name: str, column_name: str, *, nullable: Optional[bool] ... @contextmanager -def batch_alter_table(table_name: str, schema: Optional[str] = ..., recreate: Literal["auto", "always", "never"] = ..., partial_reordering: Optional[tuple] = ..., copy_from: Optional[Table] = ..., table_args: Tuple[Any, ...] = ..., table_kwargs: Mapping[str, Any] = ..., reflect_args: Tuple[Any, ...] = ..., reflect_kwargs: Mapping[str, Any] = ..., naming_convention: Optional[Dict[str, str]] = ...) -> Iterator[BatchOperations]: +def batch_alter_table(table_name: str, schema: Optional[str] = ..., recreate: Literal["auto", "always", "never"] = ..., partial_reordering: Optional[Tuple[Any, ...]] = ..., copy_from: Optional[Table] = ..., table_args: Tuple[Any, ...] = ..., table_kwargs: Mapping[str, Any] = ..., reflect_args: Tuple[Any, ...] 
= ..., reflect_kwargs: Mapping[str, Any] = ..., naming_convention: Optional[Dict[str, str]] = ...) -> Iterator[BatchOperations]: """Invoke a series of per-table migrations in batch. Batch mode allows a series of operations specific to a table @@ -319,8 +316,6 @@ def batch_alter_table(table_name: str, schema: Optional[str] = ..., recreate: Li set is undefined. Therefore it is best to specify the complete ordering of all columns for best results. - .. versionadded:: 1.4.0 - .. note:: batch mode requires SQLAlchemy 0.8 or above. .. seealso:: @@ -330,7 +325,7 @@ def batch_alter_table(table_name: str, schema: Optional[str] = ..., recreate: Li """ ... -def bulk_insert(table: Union[Table, TableClause], rows: List[dict], *, multiinsert: bool = ...) -> None: +def bulk_insert(table: Union[Table, TableClause], rows: List[Dict[str, Any]], *, multiinsert: bool = ...) -> None: """Issue a "bulk insert" operation using the current migration context. @@ -428,7 +423,7 @@ def bulk_insert(table: Union[Table, TableClause], rows: List[dict], *, multiinse """ ... -def create_check_constraint(constraint_name: Optional[str], table_name: str, condition: Union[str, BinaryExpression, TextClause], *, schema: Optional[str] = ..., **kw: Any) -> None: +def create_check_constraint(constraint_name: Optional[str], table_name: str, condition: Union[str, ColumnElement[bool], TextClause], *, schema: Optional[str] = ..., **kw: Any) -> None: """Issue a "create check constraint" instruction using the current migration context. @@ -560,7 +555,7 @@ def create_foreign_key(constraint_name: Optional[str], source_table: str, refere """ ... -def create_index(index_name: Optional[str], table_name: str, columns: Sequence[Union[str, TextClause, Function[Any]]], *, schema: Optional[str] = ..., unique: bool = ..., **kw: Any) -> None: +def create_index(index_name: Optional[str], table_name: str, columns: Sequence[Union[str, TextClause, Function[Any]]], *, schema: Optional[str] = ..., unique: bool = ..., if_not_exists: Optional[bool] = ..., **kw: Any) -> None: r"""Issue a "create index" instruction using the current migration context. @@ -588,20 +583,24 @@ def create_index(index_name: Optional[str], table_name: str, columns: Sequence[U :class:`~sqlalchemy.sql.elements.quoted_name`. :param unique: If True, create a unique index. - :param quote: - Force quoting of this column's name on or off, corresponding - to ``True`` or ``False``. When left at its default - of ``None``, the column identifier will be quoted according to - whether the name is case sensitive (identifiers with at least one - upper case character are treated as case sensitive), or if it's a - reserved word. This flag is only needed to force quoting of a - reserved word which is not known by the SQLAlchemy dialect. + :param quote: Force quoting of this column's name on or off, + corresponding to ``True`` or ``False``. When left at its default + of ``None``, the column identifier will be quoted according to + whether the name is case sensitive (identifiers with at least one + upper case character are treated as case sensitive), or if it's a + reserved word. This flag is only needed to force quoting of a + reserved word which is not known by the SQLAlchemy dialect. + + :param if_not_exists: If True, adds IF NOT EXISTS operator when + creating the new index. + + .. versionadded:: 1.12.0 :param \**kw: Additional keyword arguments not mentioned above are - dialect specific, and passed in the form - ``_``. 
- See the documentation regarding an individual dialect at - :ref:`dialect_toplevel` for detail on documented arguments. + dialect specific, and passed in the form + ``_``. + See the documentation regarding an individual dialect at + :ref:`dialect_toplevel` for detail on documented arguments. """ ... @@ -726,8 +725,6 @@ def create_table(table_name: str, *columns: SchemaItem, **kw: Any) -> Table: def create_table_comment(table_name: str, comment: Optional[str], *, existing_comment: Optional[str] = ..., schema: Optional[str] = ...) -> None: """Emit a COMMENT ON operation to set the comment for a table. - .. versionadded:: 1.0.6 - :param table_name: string name of the target table. :param comment: string value of the comment being registered against the specified table. @@ -839,7 +836,7 @@ def drop_constraint(constraint_name: str, table_name: str, type_: Optional[str] """ ... -def drop_index(index_name: str, table_name: Optional[str] = ..., *, schema: Optional[str] = ..., **kw: Any) -> None: +def drop_index(index_name: str, table_name: Optional[str] = ..., *, schema: Optional[str] = ..., if_exists: Optional[bool] = ..., **kw: Any) -> None: r"""Issue a "drop index" instruction using the current migration context. @@ -854,11 +851,17 @@ def drop_index(index_name: str, table_name: Optional[str] = ..., *, schema: Opti quoting of the schema outside of the default behavior, use the SQLAlchemy construct :class:`~sqlalchemy.sql.elements.quoted_name`. + + :param if_exists: If True, adds IF EXISTS operator when + dropping the index. + + .. versionadded:: 1.12.0 + :param \**kw: Additional keyword arguments not mentioned above are - dialect specific, and passed in the form - ``_``. - See the documentation regarding an individual dialect at - :ref:`dialect_toplevel` for detail on documented arguments. + dialect specific, and passed in the form + ``_``. + See the documentation regarding an individual dialect at + :ref:`dialect_toplevel` for detail on documented arguments. """ ... @@ -887,8 +890,6 @@ def drop_table_comment(table_name: str, *, existing_comment: Optional[str] = ... """Issue a "drop table comment" operation to remove an existing comment set on a table. - .. versionadded:: 1.0.6 - :param table_name: string name of the target table. :param existing_comment: An optional string value of a comment already registered on the specified table. @@ -902,7 +903,7 @@ def drop_table_comment(table_name: str, *, existing_comment: Optional[str] = ... """ ... -def execute(sqltext: Union[str, TextClause, Update], *, execution_options: Optional[dict[str, Any]] = ...) -> None: +def execute(sqltext: Union[Executable, str], *, execution_options: Optional[dict[str, Any]] = ...) -> None: r"""Execute the given SQL using the current migration context. The given SQL can be a plain string, e.g.:: @@ -955,7 +956,7 @@ def execute(sqltext: Union[str, TextClause, Update], *, execution_options: Optio ) Additionally, when passing the statement as a plain string, it is first - coerceed into a :func:`sqlalchemy.sql.expression.text` construct + coerced into a :func:`sqlalchemy.sql.expression.text` construct before being passed along. In the less likely case that the literal SQL string contains a colon, it must be escaped with a backslash, as:: @@ -968,9 +969,8 @@ def execute(sqltext: Union[str, TextClause, Update], *, execution_options: Optio * a string * a :func:`sqlalchemy.sql.expression.text` construct. * a :func:`sqlalchemy.sql.expression.insert` construct. 
- * a :func:`sqlalchemy.sql.expression.update`, - :func:`sqlalchemy.sql.expression.insert`, - or :func:`sqlalchemy.sql.expression.delete` construct. + * a :func:`sqlalchemy.sql.expression.update` construct. + * a :func:`sqlalchemy.sql.expression.delete` construct. * Any "executable" described in SQLAlchemy Core documentation, noting that no result set is returned. @@ -1043,7 +1043,7 @@ def get_context() -> MigrationContext: """ ... -def implementation_for(op_cls: Any) -> Callable[..., Any]: +def implementation_for(op_cls: Any) -> Callable[[_C], _C]: """Register an implementation for a given :class:`.MigrateOperation`. This is part of the operation extensibility API. @@ -1055,7 +1055,7 @@ def implementation_for(op_cls: Any) -> Callable[..., Any]: """ ... -def inline_literal(value: Union[str, int], type_: Optional[TypeEngine] = ...) -> _literal_bindparam: +def inline_literal(value: Union[str, int], type_: Optional[TypeEngine[Any]] = ...) -> _literal_bindparam: r"""Produce an 'inline literal' expression, suitable for using in an INSERT, UPDATE, or DELETE statement. @@ -1099,6 +1099,15 @@ def inline_literal(value: Union[str, int], type_: Optional[TypeEngine] = ...) -> """ ... +@overload +def invoke(operation: CreateTableOp) -> Table: + ... + +@overload +def invoke(operation: Union[AddConstraintOp, DropConstraintOp, CreateIndexOp, DropIndexOp, AddColumnOp, AlterColumnOp, AlterTableOp, CreateTableCommentOp, DropTableCommentOp, DropColumnOp, BulkInsertOp, DropTableOp, ExecuteSQLOp,]) -> None: + ... + +@overload def invoke(operation: MigrateOperation) -> Any: """Given a :class:`.MigrateOperation`, invoke it in terms of this :class:`.Operations` instance. @@ -1106,7 +1115,7 @@ def invoke(operation: MigrateOperation) -> Any: """ ... -def register_operation(name: str, sourcename: Optional[str] = ...) -> Callable[..., Any]: +def register_operation(name: str, sourcename: Optional[str] = ...) -> Callable[[Type[_T]], Type[_T]]: """Register a new operation for this class. This method is normally used to add new operations diff --git a/typings/alembic/operations/__init__.pyi b/src/database/typings/alembic/operations/__init__.pyi similarity index 69% rename from typings/alembic/operations/__init__.pyi rename to src/database/typings/alembic/operations/__init__.pyi index 83bb8ce7..5399e0ba 100644 --- a/typings/alembic/operations/__init__.pyi +++ b/src/database/typings/alembic/operations/__init__.pyi @@ -4,6 +4,6 @@ This type stub file was generated by pyright. from . import toimpl from .base import AbstractOperations, BatchOperations, Operations -from .ops import MigrateOperation +from .ops import MigrateOperation, MigrationScript -__all__ = ["AbstractOperations", "Operations", "BatchOperations", "MigrateOperation"] +__all__ = ["AbstractOperations", "Operations", "BatchOperations", "MigrateOperation", "MigrationScript"] diff --git a/typings/alembic/operations/base.pyi b/src/database/typings/alembic/operations/base.pyi similarity index 93% rename from typings/alembic/operations/base.pyi rename to src/database/typings/alembic/operations/base.pyi index 91c28397..406004ed 100644 --- a/typings/alembic/operations/base.pyi +++ b/src/database/typings/alembic/operations/base.pyi @@ -3,18 +3,19 @@ This type stub file was generated by pyright. 
""" from contextlib import contextmanager -from typing import Any, Awaitable, Callable, Dict, Iterator, List, Literal, Mapping, Optional, Sequence, TYPE_CHECKING, Tuple, Type, TypeVar, Union +from typing import Any, Awaitable, Callable, Dict, Iterator, List, Literal, Mapping, Optional, Sequence, TYPE_CHECKING, Tuple, Type, TypeVar, Union, overload from sqlalchemy.sql.elements import conv from .. import util from ..util.sqla_compat import _literal_bindparam from sqlalchemy import Table from sqlalchemy.engine import Connection -from sqlalchemy.sql.expression import BinaryExpression, TableClause, TextClause, Update +from sqlalchemy.sql import Executable +from sqlalchemy.sql.expression import ColumnElement, TableClause, TextClause from sqlalchemy.sql.functions import Function from sqlalchemy.sql.schema import Column, Computed, Identity, SchemaItem from sqlalchemy.types import TypeEngine from .batch import BatchOperationsImpl -from .ops import MigrateOperation +from .ops import AddColumnOp, AddConstraintOp, AlterColumnOp, AlterTableOp, BulkInsertOp, CreateIndexOp, CreateTableCommentOp, CreateTableOp, DropColumnOp, DropConstraintOp, DropIndexOp, DropTableCommentOp, DropTableOp, ExecuteSQLOp, MigrateOperation from ..ddl import DefaultImpl from ..runtime.migration import MigrationContext @@ -22,6 +23,7 @@ if TYPE_CHECKING: ... __all__ = ("Operations", "BatchOperations") _T = TypeVar("_T") +_C = TypeVar("_C", bound=Callable[..., Any]) class AbstractOperations(util.ModuleClsProxy): """Base class for Operations and BatchOperations. @@ -40,7 +42,7 @@ class AbstractOperations(util.ModuleClsProxy): ... @classmethod - def register_operation(cls, name: str, sourcename: Optional[str] = ...) -> Callable[..., Any]: + def register_operation(cls, name: str, sourcename: Optional[str] = ...) -> Callable[[Type[_T]], Type[_T]]: """Register a new operation for this class. This method is normally used to add new operations @@ -59,7 +61,7 @@ class AbstractOperations(util.ModuleClsProxy): ... @classmethod - def implementation_for(cls, op_cls: Any) -> Callable[..., Any]: + def implementation_for(cls, op_cls: Any) -> Callable[[_C], _C]: """Register an implementation for a given :class:`.MigrateOperation`. This is part of the operation extensibility API. @@ -77,7 +79,7 @@ class AbstractOperations(util.ModuleClsProxy): ... @contextmanager - def batch_alter_table(self, table_name: str, schema: Optional[str] = ..., recreate: Literal["auto", "always", "never"] = ..., partial_reordering: Optional[tuple] = ..., copy_from: Optional[Table] = ..., table_args: Tuple[Any, ...] = ..., table_kwargs: Mapping[str, Any] = ..., reflect_args: Tuple[Any, ...] = ..., reflect_kwargs: Mapping[str, Any] = ..., naming_convention: Optional[Dict[str, str]] = ...) -> Iterator[BatchOperations]: + def batch_alter_table(self, table_name: str, schema: Optional[str] = ..., recreate: Literal["auto", "always", "never"] = ..., partial_reordering: Optional[Tuple[Any, ...]] = ..., copy_from: Optional[Table] = ..., table_args: Tuple[Any, ...] = ..., table_kwargs: Mapping[str, Any] = ..., reflect_args: Tuple[Any, ...] = ..., reflect_kwargs: Mapping[str, Any] = ..., naming_convention: Optional[Dict[str, str]] = ...) -> Iterator[BatchOperations]: """Invoke a series of per-table migrations in batch. Batch mode allows a series of operations specific to a table @@ -207,8 +209,6 @@ class AbstractOperations(util.ModuleClsProxy): set is undefined. Therefore it is best to specify the complete ordering of all columns for best results. - .. versionadded:: 1.4.0 - .. 
note:: batch mode requires SQLAlchemy 0.8 or above. .. seealso:: @@ -225,6 +225,18 @@ class AbstractOperations(util.ModuleClsProxy): """ ... + @overload + def invoke(self, operation: CreateTableOp) -> Table: + ... + + @overload + def invoke(self, operation: Union[AddConstraintOp, DropConstraintOp, CreateIndexOp, DropIndexOp, AddColumnOp, AlterColumnOp, AlterTableOp, CreateTableCommentOp, DropTableCommentOp, DropColumnOp, BulkInsertOp, DropTableOp, ExecuteSQLOp,]) -> None: + ... + + @overload + def invoke(self, operation: MigrateOperation) -> Any: + ... + def invoke(self, operation: MigrateOperation) -> Any: """Given a :class:`.MigrateOperation`, invoke it in terms of this :class:`.Operations` instance. @@ -462,7 +474,7 @@ class Operations(AbstractOperations): """ ... - def alter_column(self, table_name: str, column_name: str, *, nullable: Optional[bool] = ..., comment: Union[str, Literal[False], None] = ..., server_default: Any = ..., new_column_name: Optional[str] = ..., type_: Union[TypeEngine, Type[TypeEngine], None] = ..., existing_type: Union[TypeEngine, Type[TypeEngine], None] = ..., existing_server_default: Union[str, bool, Identity, Computed, None] = ..., existing_nullable: Optional[bool] = ..., existing_comment: Optional[str] = ..., schema: Optional[str] = ..., **kw: Any) -> None: + def alter_column(self, table_name: str, column_name: str, *, nullable: Optional[bool] = ..., comment: Union[str, Literal[False], None] = ..., server_default: Any = ..., new_column_name: Optional[str] = ..., type_: Union[TypeEngine[Any], Type[TypeEngine[Any]], None] = ..., existing_type: Union[TypeEngine[Any], Type[TypeEngine[Any]], None] = ..., existing_server_default: Union[str, bool, Identity, Computed, None] = ..., existing_nullable: Optional[bool] = ..., existing_comment: Optional[str] = ..., schema: Optional[str] = ..., **kw: Any) -> None: r"""Issue an "alter column" instruction using the current migration context. @@ -501,9 +513,6 @@ class Operations(AbstractOperations): Set to ``None`` to have the default removed. :param comment: optional string text of a new comment to add to the column. - - .. versionadded:: 1.0.6 - :param new_column_name: Optional; specify a string name here to indicate the new name within a column rename operation. :param type\_: Optional; a :class:`~sqlalchemy.types.TypeEngine` @@ -520,7 +529,7 @@ class Operations(AbstractOperations): don't otherwise specify a new type, as well as for when nullability is being changed on a SQL Server column. It is also used if the type is a so-called - SQLlchemy "schema" type which may define a constraint (i.e. + SQLAlchemy "schema" type which may define a constraint (i.e. :class:`~sqlalchemy.types.Boolean`, :class:`~sqlalchemy.types.Enum`), so that the constraint can be dropped. @@ -537,9 +546,6 @@ class Operations(AbstractOperations): :param existing_comment: string text of the existing comment on the column to be maintained. Required on MySQL if the existing comment on the column is not being changed. - - .. versionadded:: 1.0.6 - :param schema: Optional schema name to operate within. To control quoting of the schema outside of the default behavior, use the SQLAlchemy construct @@ -553,7 +559,7 @@ class Operations(AbstractOperations): """ ... - def bulk_insert(self, table: Union[Table, TableClause], rows: List[dict], *, multiinsert: bool = ...) -> None: + def bulk_insert(self, table: Union[Table, TableClause], rows: List[Dict[str, Any]], *, multiinsert: bool = ...) 
-> None: """Issue a "bulk insert" operation using the current migration context. @@ -651,7 +657,7 @@ class Operations(AbstractOperations): """ ... - def create_check_constraint(self, constraint_name: Optional[str], table_name: str, condition: Union[str, BinaryExpression, TextClause], *, schema: Optional[str] = ..., **kw: Any) -> None: + def create_check_constraint(self, constraint_name: Optional[str], table_name: str, condition: Union[str, ColumnElement[bool], TextClause], *, schema: Optional[str] = ..., **kw: Any) -> None: """Issue a "create check constraint" instruction using the current migration context. @@ -783,7 +789,7 @@ class Operations(AbstractOperations): """ ... - def create_index(self, index_name: Optional[str], table_name: str, columns: Sequence[Union[str, TextClause, Function[Any]]], *, schema: Optional[str] = ..., unique: bool = ..., **kw: Any) -> None: + def create_index(self, index_name: Optional[str], table_name: str, columns: Sequence[Union[str, TextClause, Function[Any]]], *, schema: Optional[str] = ..., unique: bool = ..., if_not_exists: Optional[bool] = ..., **kw: Any) -> None: r"""Issue a "create index" instruction using the current migration context. @@ -811,20 +817,24 @@ class Operations(AbstractOperations): :class:`~sqlalchemy.sql.elements.quoted_name`. :param unique: If True, create a unique index. - :param quote: - Force quoting of this column's name on or off, corresponding - to ``True`` or ``False``. When left at its default - of ``None``, the column identifier will be quoted according to - whether the name is case sensitive (identifiers with at least one - upper case character are treated as case sensitive), or if it's a - reserved word. This flag is only needed to force quoting of a - reserved word which is not known by the SQLAlchemy dialect. + :param quote: Force quoting of this column's name on or off, + corresponding to ``True`` or ``False``. When left at its default + of ``None``, the column identifier will be quoted according to + whether the name is case sensitive (identifiers with at least one + upper case character are treated as case sensitive), or if it's a + reserved word. This flag is only needed to force quoting of a + reserved word which is not known by the SQLAlchemy dialect. + + :param if_not_exists: If True, adds IF NOT EXISTS operator when + creating the new index. + + .. versionadded:: 1.12.0 :param \**kw: Additional keyword arguments not mentioned above are - dialect specific, and passed in the form - ``_``. - See the documentation regarding an individual dialect at - :ref:`dialect_toplevel` for detail on documented arguments. + dialect specific, and passed in the form + ``_``. + See the documentation regarding an individual dialect at + :ref:`dialect_toplevel` for detail on documented arguments. """ ... @@ -949,8 +959,6 @@ class Operations(AbstractOperations): def create_table_comment(self, table_name: str, comment: Optional[str], *, existing_comment: Optional[str] = ..., schema: Optional[str] = ...) -> None: """Emit a COMMENT ON operation to set the comment for a table. - .. versionadded:: 1.0.6 - :param table_name: string name of the target table. :param comment: string value of the comment being registered against the specified table. @@ -1062,7 +1070,7 @@ class Operations(AbstractOperations): """ ... 
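A sketch of the new ``if_not_exists`` flag on ``create_index`` documented above; index and table names are hypothetical, and the flag needs Alembic 1.12+ plus a backend that understands ``CREATE INDEX IF NOT EXISTS``:

from alembic import op


def upgrade() -> None:
    op.create_index(
        "ix_orders_created_at",
        "orders",
        ["created_at"],
        unique=False,
        if_not_exists=True,
    )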
- def drop_index(self, index_name: str, table_name: Optional[str] = ..., *, schema: Optional[str] = ..., **kw: Any) -> None: + def drop_index(self, index_name: str, table_name: Optional[str] = ..., *, schema: Optional[str] = ..., if_exists: Optional[bool] = ..., **kw: Any) -> None: r"""Issue a "drop index" instruction using the current migration context. @@ -1077,11 +1085,17 @@ class Operations(AbstractOperations): quoting of the schema outside of the default behavior, use the SQLAlchemy construct :class:`~sqlalchemy.sql.elements.quoted_name`. + + :param if_exists: If True, adds IF EXISTS operator when + dropping the index. + + .. versionadded:: 1.12.0 + :param \**kw: Additional keyword arguments not mentioned above are - dialect specific, and passed in the form - ``_``. - See the documentation regarding an individual dialect at - :ref:`dialect_toplevel` for detail on documented arguments. + dialect specific, and passed in the form + ``_``. + See the documentation regarding an individual dialect at + :ref:`dialect_toplevel` for detail on documented arguments. """ ... @@ -1110,8 +1124,6 @@ class Operations(AbstractOperations): """Issue a "drop table comment" operation to remove an existing comment set on a table. - .. versionadded:: 1.0.6 - :param table_name: string name of the target table. :param existing_comment: An optional string value of a comment already registered on the specified table. @@ -1125,7 +1137,7 @@ class Operations(AbstractOperations): """ ... - def execute(self, sqltext: Union[str, TextClause, Update], *, execution_options: Optional[dict[str, Any]] = ...) -> None: + def execute(self, sqltext: Union[Executable, str], *, execution_options: Optional[dict[str, Any]] = ...) -> None: r"""Execute the given SQL using the current migration context. The given SQL can be a plain string, e.g.:: @@ -1178,7 +1190,7 @@ class Operations(AbstractOperations): ) Additionally, when passing the statement as a plain string, it is first - coerceed into a :func:`sqlalchemy.sql.expression.text` construct + coerced into a :func:`sqlalchemy.sql.expression.text` construct before being passed along. In the less likely case that the literal SQL string contains a colon, it must be escaped with a backslash, as:: @@ -1191,9 +1203,8 @@ class Operations(AbstractOperations): * a string * a :func:`sqlalchemy.sql.expression.text` construct. * a :func:`sqlalchemy.sql.expression.insert` construct. - * a :func:`sqlalchemy.sql.expression.update`, - :func:`sqlalchemy.sql.expression.insert`, - or :func:`sqlalchemy.sql.expression.delete` construct. + * a :func:`sqlalchemy.sql.expression.update` construct. + * a :func:`sqlalchemy.sql.expression.delete` construct. * Any "executable" described in SQLAlchemy Core documentation, noting that no result set is returned. @@ -1253,7 +1264,7 @@ class BatchOperations(AbstractOperations): """ ... 
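The widened ``execute`` annotation (``Executable`` or ``str``) and the new ``if_exists`` flag on ``drop_index`` above can be exercised as follows; table, column, and index names are hypothetical:

import sqlalchemy as sa
from alembic import op

# Lightweight table construct for the data migration below.
account = sa.table("account", sa.column("status", sa.String))


def upgrade() -> None:
    # Any Core Executable is accepted, not only update()/insert()/delete();
    # plain strings are still coerced to text().
    op.execute(
        account.update()
        .where(account.c.status == "legacy")
        .values(status="archived")
    )
    # IF EXISTS needs Alembic 1.12+ and backend support.
    op.drop_index("ix_account_status", table_name="account", if_exists=True)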
- def alter_column(self, column_name: str, *, nullable: Optional[bool] = ..., comment: Union[str, Literal[False], None] = ..., server_default: Any = ..., new_column_name: Optional[str] = ..., type_: Union[TypeEngine, Type[TypeEngine], None] = ..., existing_type: Union[TypeEngine, Type[TypeEngine], None] = ..., existing_server_default: Union[str, bool, Identity, Computed, None] = ..., existing_nullable: Optional[bool] = ..., existing_comment: Optional[str] = ..., insert_before: Optional[str] = ..., insert_after: Optional[str] = ..., **kw: Any) -> None: + def alter_column(self, column_name: str, *, nullable: Optional[bool] = ..., comment: Union[str, Literal[False], None] = ..., server_default: Any = ..., new_column_name: Optional[str] = ..., type_: Union[TypeEngine[Any], Type[TypeEngine[Any]], None] = ..., existing_type: Union[TypeEngine[Any], Type[TypeEngine[Any]], None] = ..., existing_server_default: Union[str, bool, Identity, Computed, None] = ..., existing_nullable: Optional[bool] = ..., existing_comment: Optional[str] = ..., insert_before: Optional[str] = ..., insert_after: Optional[str] = ..., **kw: Any) -> None: """Issue an "alter column" instruction using the current batch migration context. @@ -1263,8 +1274,6 @@ class BatchOperations(AbstractOperations): :param insert_before: String name of an existing column which this column should be placed before, when creating the new table. - .. versionadded:: 1.4.0 - :param insert_after: String name of an existing column which this column should be placed after, when creating the new table. If both :paramref:`.BatchOperations.alter_column.insert_before` @@ -1272,8 +1281,6 @@ class BatchOperations(AbstractOperations): omitted, the column is inserted after the last existing column in the table. - .. versionadded:: 1.4.0 - .. seealso:: :meth:`.Operations.alter_column` @@ -1282,7 +1289,7 @@ class BatchOperations(AbstractOperations): """ ... - def create_check_constraint(self, constraint_name: str, condition: Union[str, BinaryExpression, TextClause], **kw: Any) -> None: + def create_check_constraint(self, constraint_name: str, condition: Union[str, ColumnElement[bool], TextClause], **kw: Any) -> None: """Issue a "create check constraint" instruction using the current batch migration context. @@ -1296,7 +1303,7 @@ class BatchOperations(AbstractOperations): """ ... - def create_exclude_constraint(self, constraint_name: str, *elements: Any, **kw: Any): # -> None: + def create_exclude_constraint(self, constraint_name: str, *elements: Any, **kw: Any) -> Optional[Table]: """Issue a "create exclude constraint" instruction using the current batch migration context. @@ -1363,8 +1370,6 @@ class BatchOperations(AbstractOperations): """Emit a COMMENT ON operation to set the comment for a table using the current batch migration context. - .. versionadded:: 1.6.0 - :param comment: string value of the comment being registered against the specified table. :param existing_comment: String value of a comment @@ -1430,15 +1435,13 @@ class BatchOperations(AbstractOperations): remove an existing comment set on a table using the current batch operations context. - .. versionadded:: 1.6.0 - :param existing_comment: An optional string value of a comment already registered on the specified table. """ ... - def execute(self, sqltext: Union[str, TextClause, Update], *, execution_options: Optional[dict[str, Any]] = ...) -> None: + def execute(self, sqltext: Union[Executable, str], *, execution_options: Optional[dict[str, Any]] = ...) 
-> None: """Execute the given SQL using the current migration context. .. seealso:: diff --git a/typings/alembic/operations/batch.pyi b/src/database/typings/alembic/operations/batch.pyi similarity index 96% rename from typings/alembic/operations/batch.pyi rename to src/database/typings/alembic/operations/batch.pyi index 66d2e080..a8059672 100644 --- a/typings/alembic/operations/batch.pyi +++ b/src/database/typings/alembic/operations/batch.pyi @@ -4,10 +4,10 @@ This type stub file was generated by pyright. from typing import Any, Dict, Literal, Optional, TYPE_CHECKING, Union from sqlalchemy import Column, Index, Table +from sqlalchemy.sql.schema import Constraint from sqlalchemy.engine import Dialect from sqlalchemy.sql.elements import ColumnClause from sqlalchemy.sql.functions import Function -from sqlalchemy.sql.schema import Constraint from sqlalchemy.sql.type_api import TypeEngine from ..ddl.impl import DefaultImpl @@ -46,10 +46,10 @@ class BatchOperationsImpl: def rename_table(self, *arg, **kw): # -> None: ... - def create_index(self, idx: Index) -> None: + def create_index(self, idx: Index, **kw: Any) -> None: ... - def drop_index(self, idx: Index) -> None: + def drop_index(self, idx: Index, **kw: Any) -> None: ... def create_table_comment(self, table): # -> None: diff --git a/typings/alembic/operations/ops.pyi b/src/database/typings/alembic/operations/ops.pyi similarity index 92% rename from typings/alembic/operations/ops.pyi rename to src/database/typings/alembic/operations/ops.pyi index 97e57266..746e3bec 100644 --- a/typings/alembic/operations/ops.pyi +++ b/src/database/typings/alembic/operations/ops.pyi @@ -3,21 +3,24 @@ This type stub file was generated by pyright. """ from abc import abstractmethod -from typing import Any, Callable, FrozenSet, List, Literal, MutableMapping, Optional, Sequence, Set, TYPE_CHECKING, Tuple, Type, Union +from typing import Any, Callable, Dict, FrozenSet, List, Literal, MutableMapping, Optional, Sequence, Set, TYPE_CHECKING, Tuple, Type, TypeVar, Union from .base import BatchOperations, Operations from .. import util from ..util import sqla_compat -from sqlalchemy.sql.dml import Insert, Update -from sqlalchemy.sql.elements import BinaryExpression, ColumnElement, TextClause, conv, quoted_name +from sqlalchemy.sql import Executable +from sqlalchemy.sql.elements import ColumnElement, TextClause, conv, quoted_name from sqlalchemy.sql.functions import Function from sqlalchemy.sql.schema import CheckConstraint, Column, Computed, Constraint, ForeignKeyConstraint, Identity, Index, MetaData, PrimaryKeyConstraint, SchemaItem, Table, UniqueConstraint from sqlalchemy.sql.selectable import TableClause from sqlalchemy.sql.type_api import TypeEngine from ..autogenerate.rewriter import Rewriter from ..runtime.migration import MigrationContext +from ..script.revision import _RevIdType if TYPE_CHECKING: ... +_T = TypeVar("_T", bound=Any) +_AC = TypeVar("_AC", bound="AddConstraintOp") class MigrateOperation: """base class for migration command and organization objects. @@ -33,7 +36,7 @@ class MigrateOperation: """ @util.memoized_property - def info(self): # -> dict[Unknown, Unknown]: + def info(self) -> Dict[Any, Any]: """A dictionary that may be used to store arbitrary information along with this :class:`.MigrateOperation` object. @@ -53,11 +56,11 @@ class AddConstraintOp(MigrateOperation): """Represent an add constraint operation.""" add_constraint_ops = ... @property - def constraint_type(self): + def constraint_type(self) -> str: ... 
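The tightened decorator annotations earlier in this patch (``Callable[[Type[_T]], Type[_T]]`` for ``register_operation`` and ``Callable[[_C], _C]`` for ``implementation_for``) preserve the decorated type for operation plugins; a minimal sketch of that extensibility pattern built around a hypothetical ``grant_read`` operation:

from alembic.operations import MigrateOperation, Operations


@Operations.register_operation("grant_read")
class GrantReadOp(MigrateOperation):
    """Hypothetical operation granting SELECT on a table to a role."""

    def __init__(self, table_name: str, role: str) -> None:
        self.table_name = table_name
        self.role = role

    @classmethod
    def grant_read(cls, operations: Operations, table_name: str, role: str) -> None:
        operations.invoke(cls(table_name, role))


@Operations.implementation_for(GrantReadOp)
def grant_read(operations: Operations, operation: GrantReadOp) -> None:
    # Identifiers are assumed trusted here; quoting is kept minimal for the sketch.
    operations.execute(
        f'GRANT SELECT ON "{operation.table_name}" TO {operation.role}'
    )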
@classmethod - def register_add_constraint(cls, type_: str) -> Callable: + def register_add_constraint(cls, type_: str) -> Callable[[Type[_AC]], Type[_AC]]: ... @classmethod @@ -392,7 +395,7 @@ class CreateCheckConstraintOp(AddConstraintOp): ... @classmethod - def create_check_constraint(cls, operations: Operations, constraint_name: Optional[str], table_name: str, condition: Union[str, BinaryExpression, TextClause], *, schema: Optional[str] = ..., **kw: Any) -> None: + def create_check_constraint(cls, operations: Operations, constraint_name: Optional[str], table_name: str, condition: Union[str, ColumnElement[bool], TextClause], *, schema: Optional[str] = ..., **kw: Any) -> None: """Issue a "create check constraint" instruction using the current migration context. @@ -436,7 +439,7 @@ class CreateCheckConstraintOp(AddConstraintOp): ... @classmethod - def batch_create_check_constraint(cls, operations: BatchOperations, constraint_name: str, condition: Union[str, BinaryExpression, TextClause], **kw: Any) -> None: + def batch_create_check_constraint(cls, operations: BatchOperations, constraint_name: str, condition: Union[str, ColumnElement[bool], TextClause], **kw: Any) -> None: """Issue a "create check constraint" instruction using the current batch migration context. @@ -456,7 +459,7 @@ class CreateCheckConstraintOp(AddConstraintOp): @BatchOperations.register_operation("create_index", "batch_create_index") class CreateIndexOp(MigrateOperation): """Represent a create index operation.""" - def __init__(self, index_name: Optional[str], table_name: str, columns: Sequence[Union[str, TextClause, ColumnElement[Any]]], *, schema: Optional[str] = ..., unique: bool = ..., **kw: Any) -> None: + def __init__(self, index_name: Optional[str], table_name: str, columns: Sequence[Union[str, TextClause, ColumnElement[Any]]], *, schema: Optional[str] = ..., unique: bool = ..., if_not_exists: Optional[bool] = ..., **kw: Any) -> None: ... def reverse(self) -> DropIndexOp: @@ -473,7 +476,7 @@ class CreateIndexOp(MigrateOperation): ... @classmethod - def create_index(cls, operations: Operations, index_name: Optional[str], table_name: str, columns: Sequence[Union[str, TextClause, Function[Any]]], *, schema: Optional[str] = ..., unique: bool = ..., **kw: Any) -> None: + def create_index(cls, operations: Operations, index_name: Optional[str], table_name: str, columns: Sequence[Union[str, TextClause, Function[Any]]], *, schema: Optional[str] = ..., unique: bool = ..., if_not_exists: Optional[bool] = ..., **kw: Any) -> None: r"""Issue a "create index" instruction using the current migration context. @@ -501,20 +504,24 @@ class CreateIndexOp(MigrateOperation): :class:`~sqlalchemy.sql.elements.quoted_name`. :param unique: If True, create a unique index. - :param quote: - Force quoting of this column's name on or off, corresponding - to ``True`` or ``False``. When left at its default - of ``None``, the column identifier will be quoted according to - whether the name is case sensitive (identifiers with at least one - upper case character are treated as case sensitive), or if it's a - reserved word. This flag is only needed to force quoting of a - reserved word which is not known by the SQLAlchemy dialect. + :param quote: Force quoting of this column's name on or off, + corresponding to ``True`` or ``False``. 
When left at its default + of ``None``, the column identifier will be quoted according to + whether the name is case sensitive (identifiers with at least one + upper case character are treated as case sensitive), or if it's a + reserved word. This flag is only needed to force quoting of a + reserved word which is not known by the SQLAlchemy dialect. + + :param if_not_exists: If True, adds IF NOT EXISTS operator when + creating the new index. + + .. versionadded:: 1.12.0 :param \**kw: Additional keyword arguments not mentioned above are - dialect specific, and passed in the form - ``_``. - See the documentation regarding an individual dialect at - :ref:`dialect_toplevel` for detail on documented arguments. + dialect specific, and passed in the form + ``_``. + See the documentation regarding an individual dialect at + :ref:`dialect_toplevel` for detail on documented arguments. """ ... @@ -537,7 +544,7 @@ class CreateIndexOp(MigrateOperation): @BatchOperations.register_operation("drop_index", "batch_drop_index") class DropIndexOp(MigrateOperation): """Represent a drop index operation.""" - def __init__(self, index_name: Union[quoted_name, str, conv], table_name: Optional[str] = ..., *, schema: Optional[str] = ..., _reverse: Optional[CreateIndexOp] = ..., **kw: Any) -> None: + def __init__(self, index_name: Union[quoted_name, str, conv], table_name: Optional[str] = ..., *, schema: Optional[str] = ..., if_exists: Optional[bool] = ..., _reverse: Optional[CreateIndexOp] = ..., **kw: Any) -> None: ... def to_diff_tuple(self) -> Tuple[str, Index]: @@ -554,7 +561,7 @@ class DropIndexOp(MigrateOperation): ... @classmethod - def drop_index(cls, operations: Operations, index_name: str, table_name: Optional[str] = ..., *, schema: Optional[str] = ..., **kw: Any) -> None: + def drop_index(cls, operations: Operations, index_name: str, table_name: Optional[str] = ..., *, schema: Optional[str] = ..., if_exists: Optional[bool] = ..., **kw: Any) -> None: r"""Issue a "drop index" instruction using the current migration context. @@ -569,11 +576,17 @@ class DropIndexOp(MigrateOperation): quoting of the schema outside of the default behavior, use the SQLAlchemy construct :class:`~sqlalchemy.sql.elements.quoted_name`. + + :param if_exists: If True, adds IF EXISTS operator when + dropping the index. + + .. versionadded:: 1.12.0 + :param \**kw: Additional keyword arguments not mentioned above are - dialect specific, and passed in the form - ``_``. - See the documentation regarding an individual dialect at - :ref:`dialect_toplevel` for detail on documented arguments. + dialect specific, and passed in the form + ``_``. + See the documentation regarding an individual dialect at + :ref:`dialect_toplevel` for detail on documented arguments. """ ... @@ -776,8 +789,6 @@ class CreateTableCommentOp(AlterTableOp): def create_table_comment(cls, operations: Operations, table_name: str, comment: Optional[str], *, existing_comment: Optional[str] = ..., schema: Optional[str] = ...) -> None: """Emit a COMMENT ON operation to set the comment for a table. - .. versionadded:: 1.0.6 - :param table_name: string name of the target table. :param comment: string value of the comment being registered against the specified table. @@ -800,8 +811,6 @@ class CreateTableCommentOp(AlterTableOp): """Emit a COMMENT ON operation to set the comment for a table using the current batch migration context. - .. versionadded:: 1.6.0 - :param comment: string value of the comment being registered against the specified table. 
:param existing_comment: String value of a comment @@ -812,14 +821,14 @@ class CreateTableCommentOp(AlterTableOp): """ ... - def reverse(self): # -> Any: + def reverse(self) -> Union[CreateTableCommentOp, DropTableCommentOp]: """Reverses the COMMENT ON operation against a table.""" ... - def to_table(self, migration_context=...): # -> Table: + def to_table(self, migration_context: Optional[MigrationContext] = ...) -> Table: ... - def to_diff_tuple(self): # -> tuple[Literal['add_table_comment'], Table, str | None]: + def to_diff_tuple(self) -> Tuple[Any, ...]: ... @@ -836,8 +845,6 @@ class DropTableCommentOp(AlterTableOp): """Issue a "drop table comment" operation to remove an existing comment set on a table. - .. versionadded:: 1.0.6 - :param table_name: string name of the target table. :param existing_comment: An optional string value of a comment already registered on the specified table. @@ -857,22 +864,20 @@ class DropTableCommentOp(AlterTableOp): remove an existing comment set on a table using the current batch operations context. - .. versionadded:: 1.6.0 - :param existing_comment: An optional string value of a comment already registered on the specified table. """ ... - def reverse(self): # -> Any: + def reverse(self) -> CreateTableCommentOp: """Reverses the COMMENT ON operation against a table.""" ... - def to_table(self, migration_context=...): # -> Table: + def to_table(self, migration_context: Optional[MigrationContext] = ...) -> Table: ... - def to_diff_tuple(self): # -> tuple[Literal['remove_table_comment'], Table]: + def to_diff_tuple(self) -> Tuple[Any, ...]: ... @@ -894,7 +899,7 @@ class AlterColumnOp(AlterTableOp): ... @classmethod - def alter_column(cls, operations: Operations, table_name: str, column_name: str, *, nullable: Optional[bool] = ..., comment: Optional[Union[str, Literal[False]]] = ..., server_default: Any = ..., new_column_name: Optional[str] = ..., type_: Optional[Union[TypeEngine, Type[TypeEngine]]] = ..., existing_type: Optional[Union[TypeEngine, Type[TypeEngine]]] = ..., existing_server_default: Optional[Union[str, bool, Identity, Computed]] = ..., existing_nullable: Optional[bool] = ..., existing_comment: Optional[str] = ..., schema: Optional[str] = ..., **kw: Any) -> None: + def alter_column(cls, operations: Operations, table_name: str, column_name: str, *, nullable: Optional[bool] = ..., comment: Optional[Union[str, Literal[False]]] = ..., server_default: Any = ..., new_column_name: Optional[str] = ..., type_: Optional[Union[TypeEngine[Any], Type[TypeEngine[Any]]]] = ..., existing_type: Optional[Union[TypeEngine[Any], Type[TypeEngine[Any]]]] = ..., existing_server_default: Optional[Union[str, bool, Identity, Computed]] = ..., existing_nullable: Optional[bool] = ..., existing_comment: Optional[str] = ..., schema: Optional[str] = ..., **kw: Any) -> None: r"""Issue an "alter column" instruction using the current migration context. @@ -933,9 +938,6 @@ class AlterColumnOp(AlterTableOp): Set to ``None`` to have the default removed. :param comment: optional string text of a new comment to add to the column. - - .. versionadded:: 1.0.6 - :param new_column_name: Optional; specify a string name here to indicate the new name within a column rename operation. :param type\_: Optional; a :class:`~sqlalchemy.types.TypeEngine` @@ -952,7 +954,7 @@ class AlterColumnOp(AlterTableOp): don't otherwise specify a new type, as well as for when nullability is being changed on a SQL Server column. 
It is also used if the type is a so-called - SQLlchemy "schema" type which may define a constraint (i.e. + SQLAlchemy "schema" type which may define a constraint (i.e. :class:`~sqlalchemy.types.Boolean`, :class:`~sqlalchemy.types.Enum`), so that the constraint can be dropped. @@ -969,9 +971,6 @@ class AlterColumnOp(AlterTableOp): :param existing_comment: string text of the existing comment on the column to be maintained. Required on MySQL if the existing comment on the column is not being changed. - - .. versionadded:: 1.0.6 - :param schema: Optional schema name to operate within. To control quoting of the schema outside of the default behavior, use the SQLAlchemy construct @@ -986,7 +985,7 @@ class AlterColumnOp(AlterTableOp): ... @classmethod - def batch_alter_column(cls, operations: BatchOperations, column_name: str, *, nullable: Optional[bool] = ..., comment: Optional[Union[str, Literal[False]]] = ..., server_default: Any = ..., new_column_name: Optional[str] = ..., type_: Optional[Union[TypeEngine, Type[TypeEngine]]] = ..., existing_type: Optional[Union[TypeEngine, Type[TypeEngine]]] = ..., existing_server_default: Optional[Union[str, bool, Identity, Computed]] = ..., existing_nullable: Optional[bool] = ..., existing_comment: Optional[str] = ..., insert_before: Optional[str] = ..., insert_after: Optional[str] = ..., **kw: Any) -> None: + def batch_alter_column(cls, operations: BatchOperations, column_name: str, *, nullable: Optional[bool] = ..., comment: Optional[Union[str, Literal[False]]] = ..., server_default: Any = ..., new_column_name: Optional[str] = ..., type_: Optional[Union[TypeEngine[Any], Type[TypeEngine[Any]]]] = ..., existing_type: Optional[Union[TypeEngine[Any], Type[TypeEngine[Any]]]] = ..., existing_server_default: Optional[Union[str, bool, Identity, Computed]] = ..., existing_nullable: Optional[bool] = ..., existing_comment: Optional[str] = ..., insert_before: Optional[str] = ..., insert_after: Optional[str] = ..., **kw: Any) -> None: """Issue an "alter column" instruction using the current batch migration context. @@ -996,8 +995,6 @@ class AlterColumnOp(AlterTableOp): :param insert_before: String name of an existing column which this column should be placed before, when creating the new table. - .. versionadded:: 1.4.0 - :param insert_after: String name of an existing column which this column should be placed after, when creating the new table. If both :paramref:`.BatchOperations.alter_column.insert_before` @@ -1005,8 +1002,6 @@ class AlterColumnOp(AlterTableOp): omitted, the column is inserted after the last existing column in the table. - .. versionadded:: 1.4.0 - .. seealso:: :meth:`.Operations.alter_column` @@ -1030,11 +1025,11 @@ class AddColumnOp(AlterTableOp): def to_diff_tuple(self) -> Tuple[str, Optional[str], str, Column[Any]]: ... - def to_column(self) -> Column: + def to_column(self) -> Column[Any]: ... @classmethod - def from_column(cls, col: Column) -> AddColumnOp: + def from_column(cls, col: Column[Any]) -> AddColumnOp: ... @classmethod @@ -1153,7 +1148,7 @@ class DropColumnOp(AlterTableOp): def from_column_and_tablename(cls, schema: Optional[str], tname: str, col: Column[Any]) -> DropColumnOp: ... - def to_column(self, migration_context: Optional[MigrationContext] = ...) -> Column: + def to_column(self, migration_context: Optional[MigrationContext] = ...) -> Column[Any]: ... 
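The batch variants above matter most on SQLite, where ``ALTER COLUMN`` is unavailable and the table is copied and recreated; a sketch with hypothetical table and column names:

import sqlalchemy as sa
from alembic import op


def upgrade() -> None:
    # On SQLite this path recreates the table; on PostgreSQL it emits plain ALTERs.
    with op.batch_alter_table("user_account") as batch_op:
        batch_op.alter_column(
            "email",
            existing_type=sa.String(100),
            type_=sa.String(255),
            existing_nullable=False,
        )
        batch_op.add_column(sa.Column("verified", sa.Boolean(), nullable=True))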
@classmethod @@ -1213,11 +1208,11 @@ class DropColumnOp(AlterTableOp): @Operations.register_operation("bulk_insert") class BulkInsertOp(MigrateOperation): """Represent a bulk insert operation.""" - def __init__(self, table: Union[Table, TableClause], rows: List[dict], *, multiinsert: bool = ...) -> None: + def __init__(self, table: Union[Table, TableClause], rows: List[Dict[str, Any]], *, multiinsert: bool = ...) -> None: ... @classmethod - def bulk_insert(cls, operations: Operations, table: Union[Table, TableClause], rows: List[dict], *, multiinsert: bool = ...) -> None: + def bulk_insert(cls, operations: Operations, table: Union[Table, TableClause], rows: List[Dict[str, Any]], *, multiinsert: bool = ...) -> None: """Issue a "bulk insert" operation using the current migration context. @@ -1321,11 +1316,11 @@ class BulkInsertOp(MigrateOperation): @BatchOperations.register_operation("execute", "batch_execute") class ExecuteSQLOp(MigrateOperation): """Represent an execute SQL operation.""" - def __init__(self, sqltext: Union[Update, str, Insert, TextClause], *, execution_options: Optional[dict[str, Any]] = ...) -> None: + def __init__(self, sqltext: Union[Executable, str], *, execution_options: Optional[dict[str, Any]] = ...) -> None: ... @classmethod - def execute(cls, operations: Operations, sqltext: Union[str, TextClause, Update], *, execution_options: Optional[dict[str, Any]] = ...) -> None: + def execute(cls, operations: Operations, sqltext: Union[Executable, str], *, execution_options: Optional[dict[str, Any]] = ...) -> None: r"""Execute the given SQL using the current migration context. The given SQL can be a plain string, e.g.:: @@ -1378,7 +1373,7 @@ class ExecuteSQLOp(MigrateOperation): ) Additionally, when passing the statement as a plain string, it is first - coerceed into a :func:`sqlalchemy.sql.expression.text` construct + coerced into a :func:`sqlalchemy.sql.expression.text` construct before being passed along. In the less likely case that the literal SQL string contains a colon, it must be escaped with a backslash, as:: @@ -1391,9 +1386,8 @@ class ExecuteSQLOp(MigrateOperation): * a string * a :func:`sqlalchemy.sql.expression.text` construct. * a :func:`sqlalchemy.sql.expression.insert` construct. - * a :func:`sqlalchemy.sql.expression.update`, - :func:`sqlalchemy.sql.expression.insert`, - or :func:`sqlalchemy.sql.expression.delete` construct. + * a :func:`sqlalchemy.sql.expression.update` construct. + * a :func:`sqlalchemy.sql.expression.delete` construct. * Any "executable" described in SQLAlchemy Core documentation, noting that no result set is returned. @@ -1410,7 +1404,7 @@ class ExecuteSQLOp(MigrateOperation): ... @classmethod - def batch_execute(cls, operations: Operations, sqltext: Union[str, TextClause, Update], *, execution_options: Optional[dict[str, Any]] = ...) -> None: + def batch_execute(cls, operations: Operations, sqltext: Union[Executable, str], *, execution_options: Optional[dict[str, Any]] = ...) -> None: """Execute the given SQL using the current migration context. .. seealso:: @@ -1420,6 +1414,9 @@ class ExecuteSQLOp(MigrateOperation): """ ... + def to_diff_tuple(self) -> Tuple[str, Union[Executable, str]]: + ... + class OpContainer(MigrateOperation): @@ -1477,7 +1474,7 @@ class DowngradeOps(OpContainer): def __init__(self, ops: Sequence[MigrateOperation] = ..., downgrade_token: str = ...) -> None: ... - def reverse(self): # -> UpgradeOps: + def reverse(self) -> UpgradeOps: ... 
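``BulkInsertOp`` rows are now typed ``List[Dict[str, Any]]``; the corresponding ``bulk_insert`` call looks like this, with a hypothetical table and rows:

import sqlalchemy as sa
from alembic import op

role = sa.table(
    "role",
    sa.column("id", sa.Integer),
    sa.column("name", sa.String),
)


def upgrade() -> None:
    op.bulk_insert(
        role,
        [
            {"id": 1, "name": "admin"},
            {"id": 2, "name": "member"},
        ],
    )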
@@ -1506,11 +1503,13 @@ class MigrationScript(MigrateOperation): """ _needs_render: Optional[bool] - def __init__(self, rev_id: Optional[str], upgrade_ops: UpgradeOps, downgrade_ops: DowngradeOps, *, message: Optional[str] = ..., imports: Set[str] = ..., head: Optional[str] = ..., splice: Optional[bool] = ..., branch_label: Optional[str] = ..., version_path: Optional[str] = ..., depends_on: Optional[Union[str, Sequence[str]]] = ...) -> None: + _upgrade_ops: List[UpgradeOps] + _downgrade_ops: List[DowngradeOps] + def __init__(self, rev_id: Optional[str], upgrade_ops: UpgradeOps, downgrade_ops: DowngradeOps, *, message: Optional[str] = ..., imports: Set[str] = ..., head: Optional[str] = ..., splice: Optional[bool] = ..., branch_label: Optional[_RevIdType] = ..., version_path: Optional[str] = ..., depends_on: Optional[_RevIdType] = ...) -> None: ... @property - def upgrade_ops(self): # -> Any | None: + def upgrade_ops(self) -> Optional[UpgradeOps]: """An instance of :class:`.UpgradeOps`. .. seealso:: @@ -1520,11 +1519,11 @@ class MigrationScript(MigrateOperation): ... @upgrade_ops.setter - def upgrade_ops(self, upgrade_ops): # -> None: + def upgrade_ops(self, upgrade_ops: Union[UpgradeOps, List[UpgradeOps]]) -> None: ... @property - def downgrade_ops(self): # -> Any | None: + def downgrade_ops(self) -> Optional[DowngradeOps]: """An instance of :class:`.DowngradeOps`. .. seealso:: @@ -1534,7 +1533,7 @@ class MigrationScript(MigrateOperation): ... @downgrade_ops.setter - def downgrade_ops(self, downgrade_ops): # -> None: + def downgrade_ops(self, downgrade_ops: Union[DowngradeOps, List[DowngradeOps]]) -> None: ... @property diff --git a/typings/alembic/operations/schemaobj.pyi b/src/database/typings/alembic/operations/schemaobj.pyi similarity index 100% rename from typings/alembic/operations/schemaobj.pyi rename to src/database/typings/alembic/operations/schemaobj.pyi diff --git a/typings/alembic/operations/toimpl.pyi b/src/database/typings/alembic/operations/toimpl.pyi similarity index 100% rename from typings/alembic/operations/toimpl.pyi rename to src/database/typings/alembic/operations/toimpl.pyi diff --git a/typings/alembic/runtime/__init__.pyi b/src/database/typings/alembic/runtime/__init__.pyi similarity index 100% rename from typings/alembic/runtime/__init__.pyi rename to src/database/typings/alembic/runtime/__init__.pyi diff --git a/typings/alembic/runtime/environment.pyi b/src/database/typings/alembic/runtime/environment.pyi similarity index 89% rename from typings/alembic/runtime/environment.pyi rename to src/database/typings/alembic/runtime/environment.pyi index 7b9d5a4f..12c9f4f0 100644 --- a/typings/alembic/runtime/environment.pyi +++ b/src/database/typings/alembic/runtime/environment.pyi @@ -2,74 +2,31 @@ This type stub file was generated by pyright. """ -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Collection, - ContextManager, - Dict, - List, - Mapping, - MutableMapping, - Optional, - TextIO, - Tuple, - Union, - overload, -) - -from sqlalchemy.engine import URL -from sqlalchemy.engine.base import Connection -from sqlalchemy.sql.elements import ClauseElement +from typing import Any, Callable, Collection, ContextManager, Dict, List, Mapping, MutableMapping, Optional, Sequence, TYPE_CHECKING, TextIO, Tuple, Union, overload from sqlalchemy.sql.schema import FetchedValue, MetaData from typing_extensions import Literal - +from .migration import MigrationContext, _ProxyTransaction from .. 
import util +from ..script.revision import _GetRevArg +from sqlalchemy.engine import URL +from sqlalchemy.engine.base import Connection +from sqlalchemy.sql import Executable from ..config import Config from ..ddl import DefaultImpl from ..script.base import ScriptDirectory -from .migration import MigrationContext, _ProxyTransaction -if TYPE_CHECKING: ... +if TYPE_CHECKING: + ... _RevNumber = Optional[Union[str, Tuple[str, ...]]] -ProcessRevisionDirectiveFn = Callable[ - [MigrationContext, Tuple[str, str], List["MigrateOperation"]], None -] +ProcessRevisionDirectiveFn = Callable[[MigrationContext, _GetRevArg, List["MigrationScript"]], None] RenderItemFn = Callable[[str, Any, "AutogenContext"], Union[str, Literal[False]]] -NameFilterType = Literal[ - "schema", - "table", - "column", - "index", - "unique_constraint", - "foreign_key_constraint", -] -NameFilterParentNames = MutableMapping[ - Literal["schema_name", "table_name", "schema_qualified_table_name"], - Optional[str], -] +NameFilterType = Literal["schema", "table", "column", "index", "unique_constraint", "foreign_key_constraint",] +NameFilterParentNames = MutableMapping[Literal["schema_name", "table_name", "schema_qualified_table_name"], Optional[str],] IncludeNameFn = Callable[[Optional[str], NameFilterType, NameFilterParentNames], bool] -IncludeObjectFn = Callable[ - ["SchemaItem", Optional[str], NameFilterType, bool, Optional["SchemaItem"]], - bool, -] -OnVersionApplyFn = Callable[ - [MigrationContext, "MigrationInfo", Collection[Any], Mapping[str, Any]], - None, -] -CompareServerDefault = Callable[ - [ - MigrationContext, - "Column[Any]", - "Column[Any]", - Optional[str], - Optional[FetchedValue], - Optional[str], - ], - Optional[bool], -] - +IncludeObjectFn = Callable[["SchemaItem", Optional[str], NameFilterType, bool, Optional["SchemaItem"]], bool,] +OnVersionApplyFn = Callable[[MigrationContext, "MigrationInfo", Collection[Any], Mapping[str, Any]], None,] +CompareServerDefault = Callable[[MigrationContext, "Column[Any]", "Column[Any]", Optional[str], Optional[FetchedValue], Optional[str]], Optional[bool],] +CompareType = Callable[[MigrationContext, "Column[Any]", "Column[Any]", "TypeEngine[Any]", "TypeEngine[Any]"], Optional[bool],] class EnvironmentContext(util.ModuleClsProxy): """A configurational facade made available in an ``env.py`` script. @@ -133,7 +90,6 @@ class EnvironmentContext(util.ModuleClsProxy): ``env.py`` module present in the migration environment. """ - _migration_context: Optional[MigrationContext] = ... config: Config = ... script: ScriptDirectory = ... @@ -148,6 +104,7 @@ class EnvironmentContext(util.ModuleClsProxy): """ ... + def __enter__(self) -> EnvironmentContext: """Establish a context which provides a :class:`.EnvironmentContext` object to @@ -158,7 +115,10 @@ class EnvironmentContext(util.ModuleClsProxy): """ ... - def __exit__(self, *arg: Any, **kw: Any) -> None: ... + + def __exit__(self, *arg: Any, **kw: Any) -> None: + ... + def is_offline_mode(self) -> bool: """Return True if the current migrations environment is running in "offline mode". @@ -171,7 +131,8 @@ class EnvironmentContext(util.ModuleClsProxy): """ ... - def is_transactional_ddl(self): # -> bool: + + def is_transactional_ddl(self) -> bool: """Return True if the context is configured to expect a transactional DDL capable backend. @@ -184,7 +145,10 @@ class EnvironmentContext(util.ModuleClsProxy): """ ... - def requires_connection(self) -> bool: ... + + def requires_connection(self) -> bool: + ... 
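``is_offline_mode`` distinguishes the two code paths of a typical ``env.py``; the offline path only needs a URL and renders SQL without a DBAPI connection. A sketch, with a placeholder URL:

from alembic import context


def run_migrations_offline() -> None:
    # No database connection in offline mode; SQL is written to the output buffer.
    context.configure(url="sqlite:///placeholder.db", literal_binds=True)
    with context.begin_transaction():
        context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()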
+ def get_head_revision(self) -> _RevNumber: """Return the hex identifier of the 'head' script revision. @@ -199,6 +163,7 @@ class EnvironmentContext(util.ModuleClsProxy): """ ... + def get_head_revisions(self) -> _RevNumber: """Return the hex identifier of the 'heads' script revision(s). @@ -210,6 +175,7 @@ class EnvironmentContext(util.ModuleClsProxy): """ ... + def get_starting_revision_argument(self) -> _RevNumber: """Return the 'starting revision' argument, if the revision was passed using ``start:end``. @@ -223,6 +189,7 @@ class EnvironmentContext(util.ModuleClsProxy): """ ... + def get_revision_argument(self) -> _RevNumber: """Get the 'destination' revision argument. @@ -238,6 +205,7 @@ class EnvironmentContext(util.ModuleClsProxy): """ ... + def get_tag_argument(self) -> Optional[str]: """Return the value passed for the ``--tag`` argument, if any. @@ -257,17 +225,20 @@ class EnvironmentContext(util.ModuleClsProxy): """ ... + @overload - def get_x_argument(self, as_dictionary: Literal[False]) -> List[str]: ... + def get_x_argument(self, as_dictionary: Literal[False]) -> List[str]: + ... + @overload - def get_x_argument(self, as_dictionary: Literal[True]) -> Dict[str, str]: ... + def get_x_argument(self, as_dictionary: Literal[True]) -> Dict[str, str]: + ... + @overload - def get_x_argument( - self, as_dictionary: bool = ... - ) -> Union[List[str], Dict[str, str]]: ... - def get_x_argument( - self, as_dictionary: bool = ... - ) -> Union[List[str], Dict[str, str]]: + def get_x_argument(self, as_dictionary: bool = ...) -> Union[List[str], Dict[str, str]]: + ... + + def get_x_argument(self, as_dictionary: bool = ...) -> Union[List[str], Dict[str, str]]: """Return the value(s) passed for the ``-x`` argument, if any. The ``-x`` argument is an open ended flag that allows any user-defined @@ -277,7 +248,11 @@ class EnvironmentContext(util.ModuleClsProxy): The return value is a list, returned directly from the ``argparse`` structure. If ``as_dictionary=True`` is passed, the ``x`` arguments are parsed using ``key=value`` format into a dictionary that is - then returned. + then returned. If there is no ``=`` in the argument, value is an empty + string. + + .. versionchanged:: 1.13.1 Support ``as_dictionary=True`` when + arguments are passed without the ``=`` symbol. For example, to support passing a database URL on the command line, the standard ``env.py`` script can be modified like this:: @@ -307,39 +282,8 @@ class EnvironmentContext(util.ModuleClsProxy): """ ... 
- def configure( - self, - connection: Optional[Connection] = ..., - url: Optional[Union[str, URL]] = ..., - dialect_name: Optional[str] = ..., - dialect_opts: Optional[Dict[str, Any]] = ..., - transactional_ddl: Optional[bool] = ..., - transaction_per_migration: bool = ..., - output_buffer: Optional[TextIO] = ..., - starting_rev: Optional[str] = ..., - tag: Optional[str] = ..., - template_args: Optional[Dict[str, Any]] = ..., - render_as_batch: bool = ..., - # Alembic documents and supports list[MetaData] - # despite the typehint not including it in the - # library - target_metadata: Optional[MetaData | list[MetaData]] = ..., - include_name: Optional[IncludeNameFn] = ..., - include_object: Optional[IncludeObjectFn] = ..., - include_schemas: bool = ..., - process_revision_directives: Optional[ProcessRevisionDirectiveFn] = ..., - compare_type: bool = ..., - compare_server_default: Union[bool, CompareServerDefault] = ..., - render_item: Optional[RenderItemFn] = ..., - literal_binds: bool = ..., - upgrade_token: str = ..., - downgrade_token: str = ..., - alembic_module_prefix: str = ..., - sqlalchemy_module_prefix: str = ..., - user_module_prefix: Optional[str] = ..., - on_version_apply: Optional[OnVersionApplyFn] = ..., - **kw: Any - ) -> None: + + def configure(self, connection: Optional[Connection] = ..., url: Optional[Union[str, URL]] = ..., dialect_name: Optional[str] = ..., dialect_opts: Optional[Dict[str, Any]] = ..., transactional_ddl: Optional[bool] = ..., transaction_per_migration: bool = ..., output_buffer: Optional[TextIO] = ..., starting_rev: Optional[str] = ..., tag: Optional[str] = ..., template_args: Optional[Dict[str, Any]] = ..., render_as_batch: bool = ..., target_metadata: Union[MetaData, Sequence[MetaData], None] = ..., include_name: Optional[IncludeNameFn] = ..., include_object: Optional[IncludeObjectFn] = ..., include_schemas: bool = ..., process_revision_directives: Optional[ProcessRevisionDirectiveFn] = ..., compare_type: Union[bool, CompareType] = ..., compare_server_default: Union[bool, CompareServerDefault] = ..., render_item: Optional[RenderItemFn] = ..., literal_binds: bool = ..., upgrade_token: str = ..., downgrade_token: str = ..., alembic_module_prefix: str = ..., sqlalchemy_module_prefix: str = ..., user_module_prefix: Optional[str] = ..., on_version_apply: Optional[OnVersionApplyFn] = ..., **kw: Any) -> None: """Configure a :class:`.MigrationContext` within this :class:`.EnvironmentContext` which will provide database connectivity and other configuration to a series of @@ -384,9 +328,6 @@ class EnvironmentContext(util.ModuleClsProxy): ``connection`` and ``url`` are not passed. :param dialect_opts: dictionary of options to be passed to dialect constructor. - - .. versionadded:: 1.0.12 - :param transactional_ddl: Force the usage of "transactional" DDL on or off; this otherwise defaults to whether or not the dialect in @@ -469,12 +410,16 @@ class EnvironmentContext(util.ModuleClsProxy): to produce candidate upgrade/downgrade operations. :param compare_type: Indicates type comparison behavior during an autogenerate - operation. Defaults to ``False`` which disables type - comparison. Set to - ``True`` to turn on default type comparison, which has varied - accuracy depending on backend. See :ref:`compare_types` + operation. Defaults to ``True`` turning on type comparison, which + has good accuracy on most backends. See :ref:`compare_types` for an example as well as information on other type - comparison options. + comparison options. 
Set to ``False`` which disables type + comparison. A callable can also be passed to provide custom type + comparison, see :ref:`compare_types` for additional details. + + .. versionchanged:: 1.12.0 The default value of + :paramref:`.EnvironmentContext.configure.compare_type` has been + changed to ``True``. .. seealso:: @@ -558,8 +503,6 @@ class EnvironmentContext(util.ModuleClsProxy): include_name = include_name ) - .. versionadded:: 1.5 - .. seealso:: :ref:`autogenerate_include_hooks` @@ -774,6 +717,7 @@ class EnvironmentContext(util.ModuleClsProxy): """ ... + def run_migrations(self, **kw: Any) -> None: """Run migrations as determined by the current command line configuration @@ -795,9 +739,8 @@ class EnvironmentContext(util.ModuleClsProxy): """ ... - def execute( - self, sql: Union[ClauseElement, str], execution_options: Optional[dict] = ... - ) -> None: + + def execute(self, sql: Union[Executable, str], execution_options: Optional[Dict[str, Any]] = ...) -> None: """Execute the given SQL using the current change context. The behavior of :meth:`.execute` is the same @@ -810,6 +753,7 @@ class EnvironmentContext(util.ModuleClsProxy): """ ... + def static_output(self, text: str) -> None: """Emit text directly to the "offline" SQL stream. @@ -820,6 +764,7 @@ class EnvironmentContext(util.ModuleClsProxy): """ ... + def begin_transaction(self) -> Union[_ProxyTransaction, ContextManager[None]]: """Return a context manager that will enclose an operation within a "transaction", @@ -864,6 +809,7 @@ class EnvironmentContext(util.ModuleClsProxy): """ ... + def get_context(self) -> MigrationContext: """Return the current :class:`.MigrationContext` object. @@ -872,6 +818,7 @@ class EnvironmentContext(util.ModuleClsProxy): """ ... + def get_bind(self) -> Connection: """Return the current 'bind'. @@ -884,4 +831,9 @@ class EnvironmentContext(util.ModuleClsProxy): """ ... - def get_impl(self) -> DefaultImpl: ... + + def get_impl(self) -> DefaultImpl: + ... + + + diff --git a/typings/alembic/runtime/migration.pyi b/src/database/typings/alembic/runtime/migration.pyi similarity index 98% rename from typings/alembic/runtime/migration.pyi rename to src/database/typings/alembic/runtime/migration.pyi index c975722a..fb1aeea5 100644 --- a/typings/alembic/runtime/migration.pyi +++ b/src/database/typings/alembic/runtime/migration.pyi @@ -6,7 +6,7 @@ from contextlib import contextmanager from typing import Any, Collection, ContextManager, Dict, Iterator, List, Optional, Set, TYPE_CHECKING, Tuple, Union from sqlalchemy.engine import Dialect, URL from sqlalchemy.engine.base import Connection -from sqlalchemy.sql.elements import ClauseElement +from sqlalchemy.sql import Executable from .environment import EnvironmentContext from ..config import Config from ..script.base import Script, ScriptDirectory @@ -151,8 +151,6 @@ class MigrationContext: migrations whether or not one of them has an autocommit block. - .. versionadded:: 1.2.0 - """ ... @@ -271,7 +269,7 @@ class MigrationContext: """ ... - def execute(self, sql: Union[ClauseElement, str], execution_options: Optional[dict] = ...) -> None: + def execute(self, sql: Union[Executable, str], execution_options: Optional[Dict[str, Any]] = ...) -> None: """Execute a SQL construct or string statement. The underlying execution mechanics are used, that is @@ -396,6 +394,11 @@ class MigrationStep: to_revisions_no_deps: Tuple[str, ...] is_upgrade: bool migration_fn: Any + if TYPE_CHECKING: + @property + def doc(self) -> Optional[str]: + ... + @property def name(self) -> str: ... 
@@ -432,7 +435,7 @@ class RevisionStep(MigrationStep): ... @property - def doc(self) -> str: + def doc(self) -> Optional[str]: ... @property @@ -462,7 +465,7 @@ class RevisionStep(MigrationStep): def merge_branch_idents(self, heads: Set[str]) -> Tuple[List[str], str, str]: ... - def unmerge_branch_idents(self, heads: Collection[str]) -> Tuple[str, str, Tuple[str, ...]]: + def unmerge_branch_idents(self, heads: Set[str]) -> Tuple[str, str, Tuple[str, ...]]: ... def should_create_branch(self, heads: Set[str]) -> bool: diff --git a/typings/alembic/script/__init__.pyi b/src/database/typings/alembic/script/__init__.pyi similarity index 100% rename from typings/alembic/script/__init__.pyi rename to src/database/typings/alembic/script/__init__.pyi diff --git a/typings/alembic/script/base.pyi b/src/database/typings/alembic/script/base.pyi similarity index 90% rename from typings/alembic/script/base.pyi rename to src/database/typings/alembic/script/base.pyi index 339ae9c3..e886afdd 100644 --- a/typings/alembic/script/base.pyi +++ b/src/database/typings/alembic/script/base.pyi @@ -3,18 +3,21 @@ This type stub file was generated by pyright. """ from types import ModuleType -from typing import Any, Iterator, List, Mapping, Optional, Sequence, Set, TYPE_CHECKING, Tuple, Union +from typing import Any, Iterator, List, Mapping, Optional, Set, TYPE_CHECKING, Tuple, Union from . import revision +from ..util import compat +from .revision import Revision, _GetRevArg, _RevIdType from ..config import Config, MessagingOptions -from ..script.revision import Revision if TYPE_CHECKING: ... -_RevIdType = Union[str, Sequence[str]] +if compat.py39: + ... +else: + ... _sourceless_rev_file = ... _only_source_rev_file = ... _legacy_rev = ... -_mod_def_re = ... _slug_re = ... _default_file_template = ... _split_on_space_comma = ... @@ -69,17 +72,17 @@ class ScriptDirectory: """ ... - def get_revisions(self, id_: _RevIdType) -> Tuple[Optional[Script], ...]: + def get_revisions(self, id_: _GetRevArg) -> Tuple[Script, ...]: """Return the :class:`.Script` instance with the given rev identifier, symbolic name, or sequence of identifiers. """ ... - def get_all_current(self, id_: Tuple[str, ...]) -> Set[Optional[Script]]: + def get_all_current(self, id_: Tuple[str, ...]) -> Set[Script]: ... - def get_revision(self, id_: str) -> Optional[Script]: + def get_revision(self, id_: str) -> Script: """Return the :class:`.Script` instance with the given rev id. .. seealso:: @@ -176,10 +179,10 @@ class ScriptDirectory: ... @property - def env_py_location(self): # -> str: + def env_py_location(self) -> str: ... - def generate_revision(self, revid: str, message: Optional[str], head: Optional[str] = ..., refresh: bool = ..., splice: Optional[bool] = ..., branch_labels: Optional[str] = ..., version_path: Optional[str] = ..., depends_on: Optional[_RevIdType] = ..., **kw: Any) -> Optional[Script]: + def generate_revision(self, revid: str, message: Optional[str], head: Optional[_RevIdType] = ..., splice: Optional[bool] = ..., branch_labels: Optional[_RevIdType] = ..., version_path: Optional[str] = ..., depends_on: Optional[_RevIdType] = ..., **kw: Any) -> Optional[Script]: """Generate a new revision file. This runs the ``script.py.mako`` template, given @@ -195,7 +198,6 @@ class ScriptDirectory: :param splice: if True, allow the "head" version to not be an actual head; otherwise, the selected head must be a head (e.g. endpoint) revision. - :param refresh: deprecated. """ ... 
diff --git a/typings/alembic/script/revision.pyi b/src/database/typings/alembic/script/revision.pyi similarity index 87% rename from typings/alembic/script/revision.pyi rename to src/database/typings/alembic/script/revision.pyi index 3cd91379..92db9ee5 100644 --- a/typings/alembic/script/revision.pyi +++ b/src/database/typings/alembic/script/revision.pyi @@ -2,20 +2,28 @@ This type stub file was generated by pyright. """ -from typing import Any, Callable, Collection, Dict, FrozenSet, Iterable, Iterator, Optional, Sequence, Set, TYPE_CHECKING, Tuple, TypeVar, Union, overload +from typing import Any, Callable, Dict, FrozenSet, Iterable, Iterator, List, Optional, Protocol, Sequence, Set, TYPE_CHECKING, Tuple, TypeVar, Union, overload from .. import util if TYPE_CHECKING: ... -_RevIdType = Union[str, Sequence[str]] +_RevIdType = Union[str, List[str], Tuple[str, ...]] +_GetRevArg = Union[str, Iterable[Optional[str]], Iterable[str],] _RevisionIdentifierType = Union[str, Tuple[str, ...], None] _RevisionOrStr = Union["Revision", str] _RevisionOrBase = Union["Revision", "Literal['base']"] _InterimRevisionMapType = Dict[str, "Revision"] _RevisionMapType = Dict[Union[None, str, Tuple[()]], Optional["Revision"]] -_T = TypeVar("_T", bound=Union[str, "Revision"]) +_T = TypeVar("_T") +_TR = TypeVar("_TR", bound=Optional[_RevisionOrStr]) _relative_destination = ... _revision_illegal_chars = ... +class _CollectRevisionsProtocol(Protocol): + def __call__(self, upper: _RevisionIdentifierType, lower: _RevisionIdentifierType, inclusive: bool, implicit_base: bool, assert_relative_length: bool) -> Tuple[Set[Revision], Tuple[Optional[_RevisionOrBase], ...]]: + ... + + + class RevisionError(Exception): ... @@ -137,7 +145,7 @@ class RevisionMap: """ ... - def get_revisions(self, id_: Union[str, Collection[Optional[str]], None]) -> Tuple[Optional[_RevisionOrBase], ...]: + def get_revisions(self, id_: Optional[_GetRevArg]) -> Tuple[Optional[_RevisionOrBase], ...]: """Return the :class:`.Revision` instances with the given rev id or identifiers. @@ -171,7 +179,7 @@ class RevisionMap: """ ... - def filter_for_lineage(self, targets: Iterable[_T], check_against: Optional[str], include_dependencies: bool = ...) -> Tuple[_T, ...]: + def filter_for_lineage(self, targets: Iterable[_TR], check_against: Optional[str], include_dependencies: bool = ...) -> Tuple[_TR, ...]: ... def iterate_revisions(self, upper: _RevisionIdentifierType, lower: _RevisionIdentifierType, implicit_base: bool = ..., inclusive: bool = ..., assert_relative_length: bool = ..., select_for_downgrade: bool = ...) -> Iterator[Revision]: @@ -257,14 +265,14 @@ class Revision: @overload -def tuple_rev_as_scalar(rev: Optional[Sequence[str]]) -> Optional[Union[str, Sequence[str]]]: +def tuple_rev_as_scalar(rev: None) -> None: ... @overload -def tuple_rev_as_scalar(rev: Optional[Sequence[Optional[str]]]) -> Optional[Union[Optional[str], Sequence[Optional[str]]]]: +def tuple_rev_as_scalar(rev: Union[Tuple[_T, ...], List[_T]]) -> Union[_T, Tuple[_T, ...], List[_T]]: ... -def tuple_rev_as_scalar(rev): # -> None: +def tuple_rev_as_scalar(rev: Optional[Sequence[_T]]) -> Union[_T, Sequence[_T], None]: ... 
def is_revision(rev: Any) -> Revision: diff --git a/typings/alembic/script/write_hooks.pyi b/src/database/typings/alembic/script/write_hooks.pyi similarity index 83% rename from typings/alembic/script/write_hooks.pyi rename to src/database/typings/alembic/script/write_hooks.pyi index 031c161a..748c1dd4 100644 --- a/typings/alembic/script/write_hooks.pyi +++ b/src/database/typings/alembic/script/write_hooks.pyi @@ -11,8 +11,6 @@ def register(name: str) -> Callable: See the documentation linked below for an example. - .. versionadded:: 1.2.0 - .. seealso:: :ref:`post_write_hooks_custom` @@ -25,3 +23,7 @@ def register(name: str) -> Callable: def console_scripts(path: str, options: dict, ignore_output: bool = ...) -> None: ... +@register("exec") +def exec_(path: str, options: dict, ignore_output: bool = ...) -> None: + ... + diff --git a/typings/alembic/testing/__init__.pyi b/src/database/typings/alembic/testing/__init__.pyi similarity index 100% rename from typings/alembic/testing/__init__.pyi rename to src/database/typings/alembic/testing/__init__.pyi diff --git a/typings/alembic/testing/assertions.pyi b/src/database/typings/alembic/testing/assertions.pyi similarity index 94% rename from typings/alembic/testing/assertions.pyi rename to src/database/typings/alembic/testing/assertions.pyi index d86a42ae..848def88 100644 --- a/typings/alembic/testing/assertions.pyi +++ b/src/database/typings/alembic/testing/assertions.pyi @@ -45,7 +45,7 @@ def expect_warnings(*messages, **kw): """ ... -def emits_python_deprecation_warning(*messages): # -> (_Fn@decorator) -> _Fn@decorator: +def emits_python_deprecation_warning(*messages): # -> Any: """Decorator form of expect_warnings(). Note that emits_warning does **not** assert that the warnings diff --git a/typings/alembic/testing/env.pyi b/src/database/typings/alembic/testing/env.pyi similarity index 100% rename from typings/alembic/testing/env.pyi rename to src/database/typings/alembic/testing/env.pyi diff --git a/typings/alembic/testing/fixtures.pyi b/src/database/typings/alembic/testing/fixtures.pyi similarity index 92% rename from typings/alembic/testing/fixtures.pyi rename to src/database/typings/alembic/testing/fixtures.pyi index d1be4210..bbd09422 100644 --- a/typings/alembic/testing/fixtures.pyi +++ b/src/database/typings/alembic/testing/fixtures.pyi @@ -21,7 +21,7 @@ class TestBase(SQLAlchemyTestBase): ... @testing.fixture - def connection(self): # -> Generator[db, Any, None]: + def connection(self): # -> Generator[Any, Any, None]: ... @@ -37,7 +37,7 @@ else: __requires__ = ... -def capture_db(dialect=...): # -> tuple[Engine, list[Unknown]]: +def capture_db(dialect=...): # -> tuple[Engine, list[Any]]: ... _engs: Dict[Any, Any] = ... diff --git a/typings/alembic/testing/util.pyi b/src/database/typings/alembic/testing/util.pyi similarity index 89% rename from typings/alembic/testing/util.pyi rename to src/database/typings/alembic/testing/util.pyi index cee802b5..c3f3113f 100644 --- a/typings/alembic/testing/util.pyi +++ b/src/database/typings/alembic/testing/util.pyi @@ -2,7 +2,7 @@ This type stub file was generated by pyright. """ -def flag_combinations(*combinations): # -> (_FN@combinations) -> _FN@combinations: +def flag_combinations(*combinations): """A facade around @testing.combinations() oriented towards boolean keyword-based arguments. @@ -43,7 +43,7 @@ def resolve_lambda(__fn, **kw): # -> Any: """ ... 
-def metadata_fixture(ddl=...): # -> (fn: Unknown) -> Unknown: +def metadata_fixture(ddl=...): # -> Callable[..., Any]: """Provide MetaData for a pytest fixture.""" ... diff --git a/src/database/typings/alembic/util/__init__.pyi b/src/database/typings/alembic/util/__init__.pyi new file mode 100644 index 00000000..5f57f0e8 --- /dev/null +++ b/src/database/typings/alembic/util/__init__.pyi @@ -0,0 +1,13 @@ +""" +This type stub file was generated by pyright. +""" + +from .editor import open_in_editor as open_in_editor +from .exc import AutogenerateDiffsDetected as AutogenerateDiffsDetected, CommandError as CommandError +from .langhelpers import Dispatcher as Dispatcher, EMPTY_DICT as EMPTY_DICT, ModuleClsProxy as ModuleClsProxy, _with_legacy_names as _with_legacy_names, asbool as asbool, dedupe_tuple as dedupe_tuple, immutabledict as immutabledict, memoized_property as memoized_property, not_none as not_none, rev_id as rev_id, to_list as to_list, to_tuple as to_tuple, unique_list as unique_list +from .messaging import err as err, format_as_comma as format_as_comma, msg as msg, obfuscate_url_pw as obfuscate_url_pw, status as status, warn as warn, write_outstream as write_outstream +from .pyfiles import coerce_resource_to_filename as coerce_resource_to_filename, load_python_file as load_python_file, pyc_file_from_path as pyc_file_from_path, template_to_file as template_to_file +from .sqla_compat import has_computed as has_computed, sqla_13 as sqla_13, sqla_14 as sqla_14, sqla_2 as sqla_2 + +if not sqla_13: + ... diff --git a/src/database/typings/alembic/util/compat.pyi b/src/database/typings/alembic/util/compat.pyi new file mode 100644 index 00000000..d68cb70f --- /dev/null +++ b/src/database/typings/alembic/util/compat.pyi @@ -0,0 +1,40 @@ +""" +This type stub file was generated by pyright. +""" + +import io +import os +import importlib_resources +import importlib_metadata +from configparser import ConfigParser +from typing import Any, List, Optional, Sequence, Union +from importlib.metadata import EntryPoint as EntryPoint +from importlib_metadata import EntryPoint + +if True: + ... +is_posix = ... +py311 = ... +py310 = ... +py39 = ... +class EncodedIO(io.TextIOWrapper): + def close(self) -> None: + ... + + + +if py39: + importlib_resources = ... + importlib_metadata = ... +else: + ... +def importlib_metadata_get(group: str) -> Sequence[EntryPoint]: + ... + +def formatannotation_fwdref(annotation: Any, base_module: Optional[Any] = ...) -> str: + """vendored from python 3.7""" + ... + +def read_config_parser(file_config: ConfigParser, file_argument: Sequence[Union[str, os.PathLike[str]]]) -> List[str]: + ... + diff --git a/typings/alembic/util/editor.pyi b/src/database/typings/alembic/util/editor.pyi similarity index 100% rename from typings/alembic/util/editor.pyi rename to src/database/typings/alembic/util/editor.pyi diff --git a/typings/alembic/util/exc.pyi b/src/database/typings/alembic/util/exc.pyi similarity index 100% rename from typings/alembic/util/exc.pyi rename to src/database/typings/alembic/util/exc.pyi diff --git a/typings/alembic/util/langhelpers.pyi b/src/database/typings/alembic/util/langhelpers.pyi similarity index 57% rename from typings/alembic/util/langhelpers.pyi rename to src/database/typings/alembic/util/langhelpers.pyi index c466b204..d0de856a 100644 --- a/typings/alembic/util/langhelpers.pyi +++ b/src/database/typings/alembic/util/langhelpers.pyi @@ -2,13 +2,15 @@ This type stub file was generated by pyright. 
""" -from typing import (Any, Callable, Dict, Mapping, Optional, Tuple, TypeVar, - overload) +from typing import Any, Callable, Dict, List, Mapping, MutableMapping, Optional, Set, Tuple, Type, TypeVar, overload +if True: + ... EMPTY_DICT: Mapping[Any, Any] = ... -_T = TypeVar("_T") +_T = TypeVar("_T", bound=Any) +_C = TypeVar("_C", bound=Callable[..., Any]) class _ModuleClsMeta(type): - def __setattr__(cls, key: str, value: Callable) -> None: + def __setattr__(cls, key: str, value: Callable[..., Any]) -> None: ... @@ -21,9 +23,9 @@ class ModuleClsProxy(metaclass=_ModuleClsMeta): as the methods. """ - _setups: Dict[type, Tuple[set, list]] = ... + _setups: Dict[Type[Any], Tuple[Set[str], List[Tuple[MutableMapping[str, Any], MutableMapping[str, Any]]],],] = ... @classmethod - def create_module_class_proxy(cls, globals_, locals_): # -> None: + def create_module_class_proxy(cls, globals_: MutableMapping[str, Any], locals_: MutableMapping[str, Any]) -> None: ... @@ -32,7 +34,7 @@ def rev_id() -> str: ... @overload -def to_tuple(x: Any, default: tuple) -> tuple: +def to_tuple(x: Any, default: Tuple[Any, ...]) -> Tuple[Any, ...]: ... @overload @@ -40,10 +42,10 @@ def to_tuple(x: None, default: Optional[_T] = ...) -> _T: ... @overload -def to_tuple(x: Any, default: Optional[tuple] = ...) -> tuple: +def to_tuple(x: Any, default: Optional[Tuple[Any, ...]] = ...) -> Tuple[Any, ...]: ... -def to_tuple(x, default=...): # -> tuple[str] | tuple[Unknown, ...] | tuple[Unknown] | None: +def to_tuple(x: Any, default: Optional[Tuple[Any, ...]] = ...) -> Optional[Tuple[Any, ...]]: ... def dedupe_tuple(tup: Tuple[str, ...]) -> Tuple[str, ...]: @@ -53,7 +55,7 @@ class Dispatcher: def __init__(self, uselist: bool = ...) -> None: ... - def dispatch_for(self, target: Any, qualifier: str = ...) -> Callable: + def dispatch_for(self, target: Any, qualifier: str = ...) -> Callable[[_C], _C]: ... def dispatch(self, obj: Any, qualifier: str = ...) -> Any: diff --git a/typings/alembic/util/messaging.pyi b/src/database/typings/alembic/util/messaging.pyi similarity index 84% rename from typings/alembic/util/messaging.pyi rename to src/database/typings/alembic/util/messaging.pyi index 83ce7d11..f0bcf425 100644 --- a/typings/alembic/util/messaging.pyi +++ b/src/database/typings/alembic/util/messaging.pyi @@ -4,7 +4,7 @@ This type stub file was generated by pyright. from collections.abc import Iterable from contextlib import contextmanager -from typing import Optional, TextIO, Union +from typing import Iterator, Optional, TextIO, Union log = ... ioctl = ... @@ -14,10 +14,10 @@ def write_outstream(stream: TextIO, *text: Union[str, bytes], quiet: bool = ...) ... @contextmanager -def status(status_msg: str, newline: bool = ..., quiet: bool = ...): # -> Generator[None, Any, None]: +def status(status_msg: str, newline: bool = ..., quiet: bool = ...) -> Iterator[None]: ... -def err(message: str, quiet: bool = ...): +def err(message: str, quiet: bool = ...) -> None: ... def obfuscate_url_pw(input_url: str) -> str: diff --git a/typings/alembic/util/pyfiles.pyi b/src/database/typings/alembic/util/pyfiles.pyi similarity index 73% rename from typings/alembic/util/pyfiles.pyi rename to src/database/typings/alembic/util/pyfiles.pyi index d56dcd65..f651fbdd 100644 --- a/typings/alembic/util/pyfiles.pyi +++ b/src/database/typings/alembic/util/pyfiles.pyi @@ -2,9 +2,10 @@ This type stub file was generated by pyright. 
""" -from typing import Optional +from types import ModuleType +from typing import Any, Optional -def template_to_file(template_file: str, dest: str, output_encoding: str, **kw) -> None: +def template_to_file(template_file: str, dest: str, output_encoding: str, **kw: Any) -> None: ... def coerce_resource_to_filename(fname: str) -> str: @@ -21,10 +22,10 @@ def pyc_file_from_path(path: str) -> Optional[str]: """Given a python source path, locate the .pyc.""" ... -def load_python_file(dir_: str, filename: str): # -> ModuleType: +def load_python_file(dir_: str, filename: str) -> ModuleType: """Load a file from the given path as a Python module.""" ... -def load_module_py(module_id: str, path: str): # -> ModuleType: +def load_module_py(module_id: str, path: str) -> ModuleType: ... diff --git a/typings/alembic/util/sqla_compat.pyi b/src/database/typings/alembic/util/sqla_compat.pyi similarity index 77% rename from typings/alembic/util/sqla_compat.pyi rename to src/database/typings/alembic/util/sqla_compat.pyi index db7d1b70..2a5df80a 100644 --- a/typings/alembic/util/sqla_compat.pyi +++ b/src/database/typings/alembic/util/sqla_compat.pyi @@ -2,28 +2,40 @@ This type stub file was generated by pyright. """ -from typing import Optional, TYPE_CHECKING, TypeVar, Union -from sqlalchemy import Index, Table, __version__, sql, types as sqltypes +from typing import Any, Callable, Optional, Protocol, TYPE_CHECKING, Type, TypeVar, Union +from sqlalchemy import ClauseElement, Index, Table, sql, types as sqltypes +from sqlalchemy.sql.base import _NoneName from sqlalchemy.sql.elements import BindParameter, ColumnElement, TextClause from typing_extensions import TypeGuard -from sqlalchemy.sql.base import _NoneName from sqlalchemy.util import symbol as _NoneName if TYPE_CHECKING: ... _CE = TypeVar("_CE", bound=Union["ColumnElement[Any]", "SchemaItem"]) +class _CompilerProtocol(Protocol): + def __call__(self, element: Any, compiler: Any, **kw: Any) -> str: + ... + + + _vers = ... sqla_13 = ... sqla_14 = ... sqla_14_18 = ... sqla_14_26 = ... sqla_2 = ... -sqlalchemy_version = __version__ +sqlalchemy_version = ... class _Unsupported: "Placeholder for unsupported SQLAlchemy classes" ... +if TYPE_CHECKING: + def compiles(element: Type[ClauseElement], *dialects: str) -> Callable[[_CompilerProtocol], _CompilerProtocol]: + ... + +else: + ... if sqla_2: ... else: @@ -66,7 +78,7 @@ class _textual_index_element(sql.ColumnElement): def __init__(self, table: Table, text: TextClause) -> None: ... - def get_children(self): # -> list[Unknown]: + def get_children(self): # -> list[Column[NullType]]: ... @@ -76,7 +88,7 @@ class _literal_bindparam(BindParameter): if sqla_14: - ... + _select = ... else: def create_mock_engine(url, executor, **kw): # -> Engine: ... @@ -84,3 +96,6 @@ else: def is_expression_index(index: Index) -> bool: ... +def is_expression(expr: Any) -> bool: + ... + diff --git a/typings/alembic b/typings/alembic new file mode 120000 index 00000000..b6eb5412 --- /dev/null +++ b/typings/alembic @@ -0,0 +1 @@ +../src/database/typings/alembic/ \ No newline at end of file diff --git a/typings/alembic/autogenerate/__init__.pyi b/typings/alembic/autogenerate/__init__.pyi deleted file mode 100644 index 6320c04f..00000000 --- a/typings/alembic/autogenerate/__init__.pyi +++ /dev/null @@ -1,9 +0,0 @@ -""" -This type stub file was generated by pyright. 
-""" - -from .api import RevisionContext, _render_migration_diffs, compare_metadata, produce_migrations, render_python_code -from .compare import _produce_net_changes, comparators -from .render import render_op_text, renderers -from .rewriter import Rewriter - diff --git a/typings/alembic/autogenerate/compare.pyi b/typings/alembic/autogenerate/compare.pyi deleted file mode 100644 index eb1032db..00000000 --- a/typings/alembic/autogenerate/compare.pyi +++ /dev/null @@ -1,63 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -from typing import Any, List, Mapping, Optional, TYPE_CHECKING, Union -from sqlalchemy.sql.elements import quoted_name -from sqlalchemy.sql.schema import ForeignKeyConstraint, Index, UniqueConstraint -from alembic.autogenerate.api import AutogenContext -from alembic.ddl.impl import DefaultImpl - -if TYPE_CHECKING: - ... -log = ... -comparators = ... -_IndexColumnSortingOps: Mapping[str, Any] = ... -class _constraint_sig: - const: Union[UniqueConstraint, ForeignKeyConstraint, Index] - def md_name_to_sql_name(self, context: AutogenContext) -> Optional[str]: - ... - - def __eq__(self, other) -> bool: - ... - - def __ne__(self, other) -> bool: - ... - - def __hash__(self) -> int: - ... - - - -class _uq_constraint_sig(_constraint_sig): - is_index = ... - is_unique = ... - def __init__(self, const: UniqueConstraint) -> None: - ... - - @property - def column_names(self) -> List[str]: - ... - - - -class _ix_constraint_sig(_constraint_sig): - is_index = ... - def __init__(self, const: Index, impl: DefaultImpl) -> None: - ... - - def md_name_to_sql_name(self, context: AutogenContext) -> Optional[str]: - ... - - @property - def column_names(self) -> Union[List[quoted_name], List[None]]: - ... - - - -class _fk_constraint_sig(_constraint_sig): - def __init__(self, const: ForeignKeyConstraint, include_options: bool = ...) -> None: - ... - - - diff --git a/typings/alembic/util/__init__.pyi b/typings/alembic/util/__init__.pyi deleted file mode 100644 index 71cf917e..00000000 --- a/typings/alembic/util/__init__.pyi +++ /dev/null @@ -1,13 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -from .editor import open_in_editor -from .exc import AutogenerateDiffsDetected, CommandError -from .langhelpers import Dispatcher, EMPTY_DICT, ModuleClsProxy, _with_legacy_names, asbool, dedupe_tuple, immutabledict, memoized_property, not_none, rev_id, to_list, to_tuple, unique_list -from .messaging import err, format_as_comma, msg, obfuscate_url_pw, status, warn, write_outstream -from .pyfiles import coerce_resource_to_filename, load_python_file, pyc_file_from_path, template_to_file -from .sqla_compat import has_computed, sqla_13, sqla_14, sqla_2 - -if notsqla_13: - ... diff --git a/typings/alembic/util/compat.pyi b/typings/alembic/util/compat.pyi deleted file mode 100644 index 3ace0450..00000000 --- a/typings/alembic/util/compat.pyi +++ /dev/null @@ -1,30 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -import io -from typing import Sequence -from importlib.metadata import EntryPoint -from importlib_metadata import EntryPoint - -is_posix = ... -py311 = ... -py39 = ... -py38 = ... -class EncodedIO(io.TextIOWrapper): - def close(self) -> None: - ... - - - -if py39: - ... -else: - ... -def importlib_metadata_get(group: str) -> Sequence[EntryPoint]: - ... - -def formatannotation_fwdref(annotation, base_module=...): # -> str: - """vendored from python 3.7""" - ... 
- From 746048fd6d82fc3edccdf38dbf3b45bca383c5c5 Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Wed, 20 Mar 2024 12:23:12 -0700 Subject: [PATCH 23/55] Some small alterations to how Alembic is called. --- .../database/migrations/alembic/__main__.py | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/src/database/BL_Python/database/migrations/alembic/__main__.py b/src/database/BL_Python/database/migrations/alembic/__main__.py index 2eccf637..690c1916 100644 --- a/src/database/BL_Python/database/migrations/alembic/__main__.py +++ b/src/database/BL_Python/database/migrations/alembic/__main__.py @@ -5,7 +5,7 @@ from pathlib import Path # this is Alembic's main entry point -from alembic.config import CommandLine +from alembic.config import main as alembic_main def bl_alembic(argv: list[str] | None = None, log_level: int | str | None = None): @@ -20,11 +20,6 @@ def bl_alembic(argv: list[str] | None = None, log_level: int | str | None = None if not argv: argv = sys.argv[1:] - # do some set up stuff - - alembic_command_line = CommandLine(None) - # alembic_parsed_args = alembic_command_line.parser.parse_args(argv) - # if a config file has been specified on the # command line, use it and don't create # a temporary one @@ -40,7 +35,7 @@ def bl_alembic(argv: list[str] | None = None, log_level: int | str | None = None ): logger.debug("Running unmodified `alembic` command.") # run Alembic - return alembic_command_line.main(argv) + return alembic_main(argv) logger.debug("Running `alembic` with modified command.") with ( @@ -52,8 +47,11 @@ def bl_alembic(argv: list[str] | None = None, log_level: int | str | None = None # without seeking to the 0th byte _ = f2.seek(0) - argv.insert(0, "-c") - argv.insert(1, f2.name) + argv = ["-c", f2.name] + argv # run Alembic - return alembic_command_line.main(argv) + return alembic_main(argv) + + +if __name__ == "__main__": + bl_alembic() From 907f540d9100c99eee09508393360d77b3acb34e Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Wed, 20 Mar 2024 15:56:17 -0700 Subject: [PATCH 24/55] Create alembic migration directory automatically if it does not exist. 
--- .../database/migrations/alembic/__main__.py | 61 +++++++++++++++---- 1 file changed, 49 insertions(+), 12 deletions(-) diff --git a/src/database/BL_Python/database/migrations/alembic/__main__.py b/src/database/BL_Python/database/migrations/alembic/__main__.py index 690c1916..273b4efe 100644 --- a/src/database/BL_Python/database/migrations/alembic/__main__.py +++ b/src/database/BL_Python/database/migrations/alembic/__main__.py @@ -4,7 +4,10 @@ from os import environ from pathlib import Path +import alembic.util.messaging + # this is Alembic's main entry point +from alembic.config import CommandLine, Config from alembic.config import main as alembic_main @@ -20,23 +23,29 @@ def bl_alembic(argv: list[str] | None = None, log_level: int | str | None = None if not argv: argv = sys.argv[1:] - # if a config file has been specified on the - # command line, use it and don't create - # a temporary one - # print(alembic_parsed_args) args = set(argv) - if ( - not args - or "-c" in args - or "--config" in args - or "-h" in args - or "--help" in args - ): + if not args or "-h" in args or "--help" in args: logger.debug("Running unmodified `alembic` command.") # run Alembic return alembic_main(argv) + # needs to open the config and return it + # so we can get the alembic migration directory + def get_config_obj(argv): + alembic_cli = CommandLine() + parsed_args = alembic_cli.parser.parse_args(argv) + return Config(parsed_args.config) + + # if a config file has been specified on the + # command line, use it and don't create + # a temporary one + if "-c" in args or "--config" in args: + logger.debug("Running unmodified `alembic` command.") + conf = get_config_obj(argv) + print(conf.get_main_option("script_location")) + return alembic_main(argv) + logger.debug("Running `alembic` with modified command.") with ( open(Path(Path(__file__).resolve().parent, "alembic.ini"), "r") as f1, @@ -47,10 +56,38 @@ def bl_alembic(argv: list[str] | None = None, log_level: int | str | None = None # without seeking to the 0th byte _ = f2.seek(0) + conf = Config(f2.name) + argv = ["-c", f2.name] + argv + conf = get_config_obj(argv) + script_location = conf.get_main_option("script_location") or "alembic" + + _created_alembic_dir_marker = False + _msg_original = alembic.util.messaging.msg + def _msg_new( + msg: str, newline: bool = True, flush: bool = False, quiet: bool = False + ): + nonlocal _created_alembic_dir_marker + nonlocal script_location + if "use the 'init' command to create a new scripts folder" in msg: + _msg_original( + "'alembic' migration directory does not exist. Creating it." + ) + alembic_main(["init", script_location]) + _created_alembic_dir_marker = True + else: + _msg_original(msg, newline, flush, quiet) + + alembic.util.messaging.msg = _msg_new # run Alembic - return alembic_main(argv) + try: + return alembic_main(argv) + except SystemExit: + if not _created_alembic_dir_marker: + raise + finally: + alembic.util.messaging.msg = _msg_original if __name__ == "__main__": From ce81de560319ef850efc03cb691ccd15d3512d45 Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Thu, 21 Mar 2024 15:04:36 -0700 Subject: [PATCH 25/55] Clean up Alembic migration script. 
- Added documentation - Added logs - Organized code - Fixed errors in code --- .../database/migrations/alembic/__main__.py | 325 ++++++++++++++---- src/database/typings/alembic/config.pyi | 6 +- 2 files changed, 268 insertions(+), 63 deletions(-) diff --git a/src/database/BL_Python/database/migrations/alembic/__main__.py b/src/database/BL_Python/database/migrations/alembic/__main__.py index 273b4efe..7ae0d925 100644 --- a/src/database/BL_Python/database/migrations/alembic/__main__.py +++ b/src/database/BL_Python/database/migrations/alembic/__main__.py @@ -1,93 +1,294 @@ import logging import sys import tempfile +from contextlib import contextmanager +from logging import Logger from os import environ from pathlib import Path +from types import TracebackType +from typing import Any, Callable, Generator import alembic.util.messaging # this is Alembic's main entry point from alembic.config import CommandLine, Config from alembic.config import main as alembic_main +from attr import dataclass +from typing_extensions import final -def bl_alembic(argv: list[str] | None = None, log_level: int | str | None = None): - logging.basicConfig(level=logging.INFO) - if not log_level: - log_level = environ.get("LOG_LEVEL") - log_level = int(log_level) if log_level else logging.INFO +@final +class BLAlembic: + _run: Callable[[], None] + _log: Logger - logger = logging.getLogger() - logger.setLevel(log_level) + def __init__(self, argv: list[str] | None, logger: Logger) -> None: + """ + _summary_ - if not argv: - argv = sys.argv[1:] + :param list[str] | None argv: The command line arguments to be parsed by ArgumentParser. + If None, this will use `sys.argv[1:]` to use CLI arguments. + :param Logger logger: A logger for writing messages. + """ + self._log = logger - args = set(argv) + if not argv: + argv = sys.argv[1:] - if not args or "-h" in args or "--help" in args: - logger.debug("Running unmodified `alembic` command.") - # run Alembic - return alembic_main(argv) + args = set(argv) - # needs to open the config and return it - # so we can get the alembic migration directory - def get_config_obj(argv): + if not args or "-h" in args or "--help" in args: + self._log.debug(f"Empty or 'help' args passed from Alembic: {args}") + self._run = lambda: self._run_with_alembic_defaults(argv) + elif "-c" in args or "--config" in args: + self._log.debug(f"'config' args passed from Alembic: {args}") + self._run = lambda: self._run_with_specified_config(argv) + else: + self._log.debug(f"Execution-only args passed from Alembic: {args}") + self._run = lambda: self._run_with_bl_alembic_config(argv) + + def _get_config(self, argv: list[str]) -> Config: + """ + Get a parsed Alembic INI file as a `Config` object. + + :param list[str] argv: The command line arguments to be parsed by ArgumentParser. + :return Config: The `Config` object with options set from an INI file. 
+ """ + # needs to open the config and return it + # so we can get the alembic migration directory alembic_cli = CommandLine() parsed_args = alembic_cli.parser.parse_args(argv) - return Config(parsed_args.config) - - # if a config file has been specified on the - # command line, use it and don't create - # a temporary one - if "-c" in args or "--config" in args: - logger.debug("Running unmodified `alembic` command.") - conf = get_config_obj(argv) - print(conf.get_main_option("script_location")) + self._log.debug(f"Parsed arguments: {parsed_args}") + config = Config(parsed_args.config) + self._log.debug(f"Instantiated config: {repr(config)}") + return config + + def _run_with_alembic_defaults(self, argv: list[str]) -> None: + """ + Calls `alembic` programmatically. + + Used when no command line arguments, or `-h` or `--help`, are specified. + + :param list[str] argv: The command line arguments to be parsed by ArgumentParser. + :return None: + """ + self._log.debug("Running unmodified `alembic` command.") return alembic_main(argv) - logger.debug("Running `alembic` with modified command.") - with ( - open(Path(Path(__file__).resolve().parent, "alembic.ini"), "r") as f1, - tempfile.NamedTemporaryFile("w+b") as f2, - ): - f2.writelines(f1.buffer) - # the file will not be read correctly - # without seeking to the 0th byte - _ = f2.seek(0) + def _run_with_specified_config(self, argv: list[str]) -> None: + """ + Calls `alembic` programmatically. + + Used when `-c` or `--config` are specified. - conf = Config(f2.name) + :param list[str] argv: The command line arguments to be parsed by ArgumentParser. + :return None: + """ + self._log.debug("Running unmodified `alembic` command.") + self._execute_alembic(argv) - argv = ["-c", f2.name] + argv - conf = get_config_obj(argv) - script_location = conf.get_main_option("script_location") or "alembic" + def _run_with_bl_alembic_config(self, argv: list[str]) -> None: + """ + Calls `alembic` programmatically after creating a temporary + config file from the BL_Python default Alembic config, and + forcing the temporary config file to be used by `alembic`. - _created_alembic_dir_marker = False - _msg_original = alembic.util.messaging.msg + :param list[str] argv: The command line arguments to be parsed by ArgumentParser. + :return None: + """ + self._log.debug("Running `alembic` with modified command.") + with self._write_bl_alembic_config() as config_file: + argv = ["-c", config_file.name] + argv - def _msg_new( - msg: str, newline: bool = True, flush: bool = False, quiet: bool = False - ): - nonlocal _created_alembic_dir_marker + self._execute_alembic(argv) + + def _execute_alembic(self, argv: list[str]) -> None: + """ + Programmatically run `alembic`. + + :param list[str] argv: The command line arguments to be parsed by ArgumentParser. + :return None: + """ + config = self._get_config(argv) + + with self._initialize_alembic(config) as msg_capture: + try: + return alembic_main(argv) + except SystemExit: + # If SystemExit is from anything other than + # needing to create the init dir, then crash. + # This is doable/reliable because Alembic first writes + # a message that the directory needs to be created, + # then calls `sys.exit(-1)`. + if not msg_capture.seen: + raise + + self._log.debug( + f"The Alembic initialization error was seen. Ignoring `{SystemExit.__name__}` exception." + ) + + def _initialize_alembic(self, config: Config): + """ + Set up Alembic to run `alembic init` programmatically if it is needed. 
+ + :param Config config: The config, parsed from an Alembic INI configuration file. + :return MsgCaptureCtxManager: A type indicating whether an expected message was + written by Alembic. In the case of this method, if the "use the 'init'" + message is seen, then `alembic init` is executed. This type can be used to + determine whether `alembic init` was executed. + """ + script_location = config.get_main_option("script_location") or "alembic" + + def _msg_new(msg: Callable[[str, bool, bool, bool], None]): nonlocal script_location - if "use the 'init' command to create a new scripts folder" in msg: - _msg_original( - "'alembic' migration directory does not exist. Creating it." + self._log.debug("Executing `alembic init`.") + msg( + "'alembic' migration directory does not exist. Creating it.", + # these bool values are defaults for Alembic msg function + True, + False, + False, + ) + alembic_main(["init", script_location]) + + return self._alembic_msg_capture( + "use the 'init' command to create a new scripts folder", _msg_new + ) + + @contextmanager + def _write_bl_alembic_config( + self, + ) -> "Generator[tempfile._TemporaryFileWrapper[bytes], Any, None]": # pyright: ignore[reportPrivateUsage] + """ + Write the BL_Python Alembic tool's default configuration file to a temp file. + + :yield Generator[tempfile._TemporaryFileWrapper[bytes], Any, None]: The temp file. + """ + with tempfile.NamedTemporaryFile("w+b") as temp_config_file: + self._log.debug(f"Temp file created at '{temp_config_file.name}'.") + with open( + Path(Path(__file__).resolve().parent, "alembic.ini"), "r" + ) as default_config_file: + self._log.debug( + f"Writing config file 'alembic.ini' to temp file '{temp_config_file.name}'." ) - alembic_main(["init", script_location]) - _created_alembic_dir_marker = True - else: - _msg_original(msg, newline, flush, quiet) - - alembic.util.messaging.msg = _msg_new - # run Alembic - try: - return alembic_main(argv) - except SystemExit: - if not _created_alembic_dir_marker: - raise - finally: - alembic.util.messaging.msg = _msg_original + temp_config_file.writelines(default_config_file.buffer) + + # the file will not be read correctly + # without seeking to the 0th byte + _ = temp_config_file.seek(0) + + # yield so the temp file isn't deleted + yield temp_config_file + + def _alembic_msg_capture( + self, + msg_to_capture: str, + callback: Callable[[Callable[[str, bool, bool, bool], None]], None], + ): + """ + Capture a specific message written by Alembic, and call `callback` if it matches. + + This method override's Alembic's `msg` function and restores it when the + context is closed. + + :param str msg_to_capture: The specific message to monitor in Alembic's writes. + :param Callable[[Callable[[str, bool, bool, bool], None]], None] callback: + A callable that receives Alembic's `msg` function as a parameter. + """ + + OVERRIDDEN_ORIGINAL_ATTR_NAME = "_overridden_original" + if hasattr(alembic.util.messaging.msg, OVERRIDDEN_ORIGINAL_ATTR_NAME): + # if the attr exists that means we have already overriden it, + # so we set `_msg_original` to the real original. + self._log.debug( + f"`alembic.util.messaging.msg` has already been overwritten. Using `{OVERRIDDEN_ORIGINAL_ATTR_NAME}` attribute to get the original method." + ) + _msg_original = getattr( + alembic.util.messaging.msg, OVERRIDDEN_ORIGINAL_ATTR_NAME + ) + else: + self._log.debug( + f"`alembic.util.messaging.msg` has not been overridden. Using it as the original method." 
+ ) + # if the attr does not exist, then we assume `msg` is + # the original Alembic `msg` function. + _msg_original: Callable[[str, bool, bool, bool], None] = ( + alembic.util.messaging.msg + ) + + @dataclass + class MessageSeen: + seen: bool = False + + @final + class MsgCaptureCtxManager: + _msg_seen: MessageSeen = MessageSeen() + _log: Logger + + def __init__(self, logger: Logger) -> None: + self._log = logger + + def __enter__(self): + # this function replaces Alembic's `msg` function + self._log.debug(f"Entering `{MsgCaptureCtxManager.__name__}` context.") + + def _msg_new( + msg: str, + newline: bool = True, + flush: bool = False, + quiet: bool = False, + ): + if msg_to_capture in msg: + self._log.debug( + f"The msg '{msg_to_capture}' was written by Alembic." + ) + callback(_msg_original) + self._msg_seen.seen = True + else: + _msg_original(msg, newline, flush, quiet) + + setattr( + _msg_new, OVERRIDDEN_ORIGINAL_ATTR_NAME, alembic.util.messaging.msg + ) + + self._log.debug( + f"Overwritting `alembic.util.messaging.msg` with `{repr(_msg_new)}`." + ) + alembic.util.messaging.msg = _msg_new + + return self._msg_seen + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool: + self._log.debug(f"Exiting `{MsgCaptureCtxManager.__name__}` context.") + alembic.util.messaging.msg = _msg_original + return True + + return MsgCaptureCtxManager(self._log) + + def run(self): + self._log.debug("Bootstrapping and executing `alembic` process.") + return self._run() + + +def bl_alembic( + argv: list[str] | None = None, log_level: int | str | None = None +) -> None: + logging.basicConfig(level=logging.INFO) + if not log_level: + log_level = environ.get("LOG_LEVEL") + log_level = int(log_level) if log_level else logging.INFO + + logger = logging.getLogger() + logger.setLevel(log_level) + + bla = BLAlembic(argv, logger) + bla.run() if __name__ == "__main__": diff --git a/src/database/typings/alembic/config.pyi b/src/database/typings/alembic/config.pyi index e0ae27da..3875e344 100644 --- a/src/database/typings/alembic/config.pyi +++ b/src/database/typings/alembic/config.pyi @@ -3,10 +3,12 @@ This type stub file was generated by pyright. """ import os -from argparse import Namespace +from argparse import ArgumentParser, Namespace from configparser import ConfigParser from typing import Any, Dict, Mapping, Optional, Sequence, TextIO, Union, overload + from typing_extensions import TypedDict + from . import util class Config: @@ -244,6 +246,8 @@ class MessagingOptions(TypedDict, total=False): class CommandLine: + parser: ArgumentParser + def __init__(self, prog: Optional[str] = ...) -> None: ... From 2a1b7e24447d542a63b7dd7495d77440a8eb5961 Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Thu, 21 Mar 2024 16:30:44 -0700 Subject: [PATCH 26/55] Handle placement of BL_Python env scripts for Alembic. 
--- .../database/migrations/alembic/__main__.py | 66 +++++++++++++++++-- .../migrations/alembic/_replacement_env.py | 6 ++ .../alembic/_replacement_env_setup.py | 1 + .../database/migrations/alembic/env_setup.py | 38 +++++++++-- 4 files changed, 100 insertions(+), 11 deletions(-) create mode 100644 src/database/BL_Python/database/migrations/alembic/_replacement_env.py create mode 100644 src/database/BL_Python/database/migrations/alembic/_replacement_env_setup.py diff --git a/src/database/BL_Python/database/migrations/alembic/__main__.py b/src/database/BL_Python/database/migrations/alembic/__main__.py index 7ae0d925..de84f49c 100644 --- a/src/database/BL_Python/database/migrations/alembic/__main__.py +++ b/src/database/BL_Python/database/migrations/alembic/__main__.py @@ -114,7 +114,8 @@ def _execute_alembic(self, argv: list[str]) -> None: with self._initialize_alembic(config) as msg_capture: try: return alembic_main(argv) - except SystemExit: + except SystemExit as e: + self._log.error(e) # If SystemExit is from anything other than # needing to create the init dir, then crash. # This is doable/reliable because Alembic first writes @@ -136,6 +137,7 @@ def _initialize_alembic(self, config: Config): written by Alembic. In the case of this method, if the "use the 'init'" message is seen, then `alembic init` is executed. This type can be used to determine whether `alembic init` was executed. + :return MsgCaptureCtxManager: """ script_location = config.get_main_option("script_location") or "alembic" @@ -151,10 +153,41 @@ def _msg_new(msg: Callable[[str, bool, bool, bool], None]): ) alembic_main(["init", script_location]) + self._overwrite_alembic_env_files(config) + return self._alembic_msg_capture( "use the 'init' command to create a new scripts folder", _msg_new ) + def _overwrite_alembic_env_files(self, config: Config) -> None: + """ + Overwrite env.py and env_setup.py in an Alembic migrations directory. + Currently this only runs if `alembic init` is executed, and care must + be taken if we intend to change this to overwrite the files if they exist. + The files will exist if `alembic init` was executed prior to this tool. + + :param Config config: The config, parsed from an Alembic INI configuration file. + :return None: + """ + script_location = config.get_main_option("script_location") or "alembic" + bl_python_alembic_file_dir = Path(__file__).resolve().parent + + files = [ + ( + Path(bl_python_alembic_file_dir, f"_replacement_{basename}.py"), + Path(script_location, f"{basename}.py"), + ) + for basename in ["env", "env_setup"] + ] + + for file in files: + self._log.debug(f"Rewriting base Alembic files: {file}") + with ( + open(file[0], "r") as replacement, + open(file[1], "w+b") as original, + ): + original.writelines(replacement.buffer) + @contextmanager def _write_bl_alembic_config( self, @@ -195,6 +228,7 @@ def _alembic_msg_capture( :param str msg_to_capture: The specific message to monitor in Alembic's writes. :param Callable[[Callable[[str, bool, bool, bool], None]], None] callback: A callable that receives Alembic's `msg` function as a parameter. + :return MsgCaptureCtxManager: """ OVERRIDDEN_ORIGINAL_ATTR_NAME = "_overridden_original" @@ -229,8 +263,13 @@ class MsgCaptureCtxManager: def __init__(self, logger: Logger) -> None: self._log = logger - def __enter__(self): - # this function replaces Alembic's `msg` function + def __enter__(self) -> MessageSeen: + """ + Replace Alembic's `msg` function in order to execute + a callback when certain messages are seen. 
+ + :return _type_: _description_ + """ self._log.debug(f"Entering `{MsgCaptureCtxManager.__name__}` context.") def _msg_new( @@ -265,13 +304,26 @@ def __exit__( exc_val: BaseException | None, exc_tb: TracebackType | None, ) -> bool: + """ + Revert replacing Alembic's `msg` function by restoring the original. + + :param type[BaseException] | None exc_type: + :param BaseException | None exc_val: + :param TracebackType | None exc_tb: + :return bool: + """ self._log.debug(f"Exiting `{MsgCaptureCtxManager.__name__}` context.") alembic.util.messaging.msg = _msg_original return True return MsgCaptureCtxManager(self._log) - def run(self): + def run(self) -> None: + """ + Run Alembic migrations, initializing Alembic if necessary. + + :return None: + """ self._log.debug("Bootstrapping and executing `alembic` process.") return self._run() @@ -279,6 +331,12 @@ def run(self): def bl_alembic( argv: list[str] | None = None, log_level: int | str | None = None ) -> None: + """ + A method to support the `bl-alembic` command, which replaces `alembic. + + :param list[str] | None argv: CLI arguments, defaults to None + :param int | str | None log_level: An integer log level to configure logging verbosity, defaults to None + """ logging.basicConfig(level=logging.INFO) if not log_level: log_level = environ.get("LOG_LEVEL") diff --git a/src/database/BL_Python/database/migrations/alembic/_replacement_env.py b/src/database/BL_Python/database/migrations/alembic/_replacement_env.py new file mode 100644 index 00000000..2915ec9b --- /dev/null +++ b/src/database/BL_Python/database/migrations/alembic/_replacement_env.py @@ -0,0 +1,6 @@ +from pathlib import Path + +from BL_Python.database.migrations.alembic.env import run_migrations + +# TODO replace with your MetaBase types and config file path +run_migrations(bases=[], config_filename=Path("config.toml")) diff --git a/src/database/BL_Python/database/migrations/alembic/_replacement_env_setup.py b/src/database/BL_Python/database/migrations/alembic/_replacement_env_setup.py new file mode 100644 index 00000000..a9474781 --- /dev/null +++ b/src/database/BL_Python/database/migrations/alembic/_replacement_env_setup.py @@ -0,0 +1 @@ +from BL_Python.database.migrations.alembic.env_setup import * diff --git a/src/database/BL_Python/database/migrations/alembic/env_setup.py b/src/database/BL_Python/database/migrations/alembic/env_setup.py index f0a5d3f7..751049a2 100644 --- a/src/database/BL_Python/database/migrations/alembic/env_setup.py +++ b/src/database/BL_Python/database/migrations/alembic/env_setup.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from functools import lru_cache from logging.config import fileConfig -from typing import Any, List, Protocol, cast, final +from typing import Any, List, Literal, Protocol, cast, final from alembic import context from BL_Python.database.config import DatabaseConfig @@ -19,13 +19,27 @@ from sqlalchemy import MetaData, Table, engine_from_config, pool from sqlalchemy.engine import Connectable, Connection, Engine from sqlalchemy.exc import ProgrammingError +from sqlalchemy.schema import SchemaItem # from AWS import load_ssm_application_parameters class type_include_object(Protocol): + # self, object: Table, name: str, type_: str, reflected: Any, compare_to: Any def __call__( - self, object: Table, name: str, type_: str, reflected: Any, compare_to: Any + self, + object: SchemaItem, + name: str | None, + type_: Literal[ + "schema", + "table", + "column", + "index", + "unique_constraint", + "foreign_key_constraint", + ], + reflected: 
bool, + compare_to: SchemaItem | None, ) -> bool: ... @@ -85,9 +99,20 @@ def get_metadata(self, bases: list[MetaBase]): def include_schemas(names: List[str]): def include_object( - object: Table, name: str, type_: str, reflected: Any, compare_to: Any - ): - if type_ == "table": + object: SchemaItem | Table, + name: str | None, + type_: Literal[ + "schema", + "table", + "column", + "index", + "unique_constraint", + "foreign_key_constraint", + ], + reflected: bool, + compare_to: SchemaItem | None, + ) -> bool: + if type_ == "table" and isinstance(object, Table): return object.schema in names return True @@ -183,7 +208,6 @@ def run_migrations_offline(self, bases: list[MetaBase]): script output. """ - config = self.get_config() metadata = self.get_metadata(bases) target_metadata = metadata.target_metadata @@ -213,7 +237,7 @@ def run_migrations_online(self, bases: list[MetaBase]): """ config = self.get_config() - connectable: Connectable | None = cast(dict[str, Any], config.attributes).get( # pyright: ignore[reportUnknownMemberType] + connectable: Connectable | None = cast(dict[str, Any], config.attributes).get( "connection", None ) From a015712a8775dbf00af1901a9cea38749c2f47b2 Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Thu, 21 Mar 2024 16:45:19 -0700 Subject: [PATCH 27/55] Fix inconsistent error in CI/CD vs. local caused by wildcard import. --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index a1ac34ed..b57e4611 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -139,6 +139,7 @@ reportUninitializedInstanceVariable = "information" reportUnnecessaryTypeIgnoreComment = "information" reportUnusedCallResult = "information" reportMissingTypeStubs = "information" +reportWildcardImportFromLibrary = "warning" [tool.pytest.ini_options] pythonpath = [ From 69077f2f8d13bea1d376171598510ab852a50bfc Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Fri, 22 Mar 2024 09:43:26 -0700 Subject: [PATCH 28/55] Don't create an alembic.ini if it already exists but was not specific on the CLI. --- .../database/migrations/alembic/__main__.py | 49 +++++++++++++------ 1 file changed, 33 insertions(+), 16 deletions(-) diff --git a/src/database/BL_Python/database/migrations/alembic/__main__.py b/src/database/BL_Python/database/migrations/alembic/__main__.py index de84f49c..3507d5e8 100644 --- a/src/database/BL_Python/database/migrations/alembic/__main__.py +++ b/src/database/BL_Python/database/migrations/alembic/__main__.py @@ -19,6 +19,8 @@ @final class BLAlembic: + DEFAULT_CONFIG_NAME: str = "alembic.ini" + _run: Callable[[], None] _log: Logger @@ -45,7 +47,7 @@ def __init__(self, argv: list[str] | None, logger: Logger) -> None: self._run = lambda: self._run_with_specified_config(argv) else: self._log.debug(f"Execution-only args passed from Alembic: {args}") - self._run = lambda: self._run_with_bl_alembic_config(argv) + self._run = lambda: self._run_with_config(argv) def _get_config(self, argv: list[str]) -> Config: """ @@ -87,20 +89,27 @@ def _run_with_specified_config(self, argv: list[str]) -> None: self._log.debug("Running unmodified `alembic` command.") self._execute_alembic(argv) - def _run_with_bl_alembic_config(self, argv: list[str]) -> None: + def _run_with_config(self, argv: list[str]) -> None: """ - Calls `alembic` programmatically after creating a temporary - config file from the BL_Python default Alembic config, and - forcing the temporary config file to be used by `alembic`. 
+ Calls `alembic` programmatically either: + - if the file 'alembic.ini' exists in the same working + directory in which the command is run. + - if the file 'alembic.ini' does not exist and after creating + a temporary configuration file from the BL_Python default Alembic + config, and forcing the temporary configuration file to be used + by `alembic`. :param list[str] argv: The command line arguments to be parsed by ArgumentParser. :return None: """ - self._log.debug("Running `alembic` with modified command.") - with self._write_bl_alembic_config() as config_file: - argv = ["-c", config_file.name] + argv + if not Path(BLAlembic.DEFAULT_CONFIG_NAME).exists(): + self._log.debug("Running `alembic` with modified command.") + with self._write_bl_alembic_config() as config_file: + argv = ["-c", config_file.name] + argv + else: + self._log.debug("Running `alembic` with discovered configuration file.") - self._execute_alembic(argv) + self._execute_alembic(argv) def _execute_alembic(self, argv: list[str]) -> None: """ @@ -172,21 +181,28 @@ def _overwrite_alembic_env_files(self, config: Config) -> None: script_location = config.get_main_option("script_location") or "alembic" bl_python_alembic_file_dir = Path(__file__).resolve().parent - files = [ + files = ( ( Path(bl_python_alembic_file_dir, f"_replacement_{basename}.py"), Path(script_location, f"{basename}.py"), ) for basename in ["env", "env_setup"] - ] + ) for file in files: + source_path, destination_path = file + if Path(destination_path).exists(): + self._log.warn( + f"The file '{destination_path}' already exists, but this is unexpected. Refusing to overwrite. Please report this problem." + ) + continue + self._log.debug(f"Rewriting base Alembic files: {file}") with ( - open(file[0], "r") as replacement, - open(file[1], "w+b") as original, + open(source_path, "r") as source, + open(destination_path, "w+b") as destination, ): - original.writelines(replacement.buffer) + destination.writelines(source.buffer) @contextmanager def _write_bl_alembic_config( @@ -200,10 +216,11 @@ def _write_bl_alembic_config( with tempfile.NamedTemporaryFile("w+b") as temp_config_file: self._log.debug(f"Temp file created at '{temp_config_file.name}'.") with open( - Path(Path(__file__).resolve().parent, "alembic.ini"), "r" + Path(Path(__file__).resolve().parent, BLAlembic.DEFAULT_CONFIG_NAME), + "r", ) as default_config_file: self._log.debug( - f"Writing config file 'alembic.ini' to temp file '{temp_config_file.name}'." + f"Writing configuration file '{BLAlembic.DEFAULT_CONFIG_NAME}' to temp file '{temp_config_file.name}'." ) temp_config_file.writelines(default_config_file.buffer) From 5a898bfcdbddf48356e83d243f936d50a7ebbb39 Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Fri, 22 Mar 2024 14:05:35 -0700 Subject: [PATCH 29/55] Don't use tempfile for bl-alembic. Fix some logic errors with file creation. 
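The direction here -- reuse an `alembic.ini` that already exists in the working directory, and otherwise copy the packaged default there instead of writing a temporary file -- amounts to roughly the sketch below. The helper name `find_or_create_config` is illustrative only and does not exist in the package; the real behavior lives in the `BLAlembic` methods changed in this patch.

import shutil
from pathlib import Path

DEFAULT_CONFIG_NAME = "alembic.ini"


def find_or_create_config(packaged_config_dir: Path, cwd: Path | None = None) -> Path:
    """Return a usable alembic.ini, copying the packaged default if none exists."""
    cwd = cwd or Path.cwd()
    destination = cwd / DEFAULT_CONFIG_NAME
    if destination.exists():
        # Reuse the configuration already present in the working directory.
        return destination
    # Otherwise copy the default configuration that ships with the tool.
    shutil.copyfile(packaged_config_dir / DEFAULT_CONFIG_NAME, destination)
    return destination


# `alembic` is then invoked as though `-c alembic.ini` had been passed:
# argv = ["-c", DEFAULT_CONFIG_NAME] + argv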
--- .../database/migrations/alembic/__main__.py | 118 +++++++++++------- 1 file changed, 71 insertions(+), 47 deletions(-) diff --git a/src/database/BL_Python/database/migrations/alembic/__main__.py b/src/database/BL_Python/database/migrations/alembic/__main__.py index 3507d5e8..d6f50ae8 100644 --- a/src/database/BL_Python/database/migrations/alembic/__main__.py +++ b/src/database/BL_Python/database/migrations/alembic/__main__.py @@ -1,12 +1,10 @@ import logging import sys -import tempfile -from contextlib import contextmanager from logging import Logger from os import environ from pathlib import Path from types import TracebackType -from typing import Any, Callable, Generator +from typing import Callable import alembic.util.messaging @@ -20,11 +18,21 @@ @final class BLAlembic: DEFAULT_CONFIG_NAME: str = "alembic.ini" + LOG_LEVEL_NAME: str = "LOG_LEVEL" + ALLOW_OVERWRITE_NAME: str = "ALLOW_OVERWRITE" _run: Callable[[], None] _log: Logger + _allow_overwrite: bool = False - def __init__(self, argv: list[str] | None, logger: Logger) -> None: + @dataclass + class FileCopy: + source: Path + destination: Path + + def __init__( + self, argv: list[str] | None, logger: Logger, allow_overwrite: bool = False + ) -> None: """ _summary_ @@ -33,6 +41,7 @@ def __init__(self, argv: list[str] | None, logger: Logger) -> None: :param Logger logger: A logger for writing messages. """ self._log = logger + self._allow_overwrite = allow_overwrite if not argv: argv = sys.argv[1:] @@ -102,12 +111,12 @@ def _run_with_config(self, argv: list[str]) -> None: :param list[str] argv: The command line arguments to be parsed by ArgumentParser. :return None: """ - if not Path(BLAlembic.DEFAULT_CONFIG_NAME).exists(): - self._log.debug("Running `alembic` with modified command.") - with self._write_bl_alembic_config() as config_file: - argv = ["-c", config_file.name] + argv - else: - self._log.debug("Running `alembic` with discovered configuration file.") + # if not Path(BLAlembic.DEFAULT_CONFIG_NAME).exists(): + self._log.debug("Running `alembic` with modified command.") + self._write_bl_alembic_config() + argv = ["-c", BLAlembic.DEFAULT_CONFIG_NAME] + argv + # else: + # self._log.debug("Running `alembic` with discovered configuration file.") self._execute_alembic(argv) @@ -181,55 +190,61 @@ def _overwrite_alembic_env_files(self, config: Config) -> None: script_location = config.get_main_option("script_location") or "alembic" bl_python_alembic_file_dir = Path(__file__).resolve().parent - files = ( - ( + files = [ + BLAlembic.FileCopy( Path(bl_python_alembic_file_dir, f"_replacement_{basename}.py"), - Path(script_location, f"{basename}.py"), + Path(Path.cwd(), Path(script_location, f"{basename}.py")), ) for basename in ["env", "env_setup"] - ) + ] - for file in files: - source_path, destination_path = file - if Path(destination_path).exists(): - self._log.warn( - f"The file '{destination_path}' already exists, but this is unexpected. Refusing to overwrite. Please report this problem." - ) - continue - - self._log.debug(f"Rewriting base Alembic files: {file}") - with ( - open(source_path, "r") as source, - open(destination_path, "w+b") as destination, - ): - destination.writelines(source.buffer) + self._log.debug(f"Rewriting base Alembic files: '{files}'") + # force the overwrite because Alembic creates the + # files that we want to replace. 
+ self._copy_files(files, force_overwrite=True) - @contextmanager def _write_bl_alembic_config( self, - ) -> "Generator[tempfile._TemporaryFileWrapper[bytes], Any, None]": # pyright: ignore[reportPrivateUsage] + ) -> None: """ Write the BL_Python Alembic tool's default configuration file to a temp file. :yield Generator[tempfile._TemporaryFileWrapper[bytes], Any, None]: The temp file. """ - with tempfile.NamedTemporaryFile("w+b") as temp_config_file: - self._log.debug(f"Temp file created at '{temp_config_file.name}'.") - with open( - Path(Path(__file__).resolve().parent, BLAlembic.DEFAULT_CONFIG_NAME), - "r", - ) as default_config_file: - self._log.debug( - f"Writing configuration file '{BLAlembic.DEFAULT_CONFIG_NAME}' to temp file '{temp_config_file.name}'." + # need to _not_ use a temp file, and copy the default alembic.ini + # to the directory in which bl-alembic is executed. + self._log.debug( + f"Writing configuration file '{BLAlembic.DEFAULT_CONFIG_NAME}'." + ) + self._copy_files( + [ + BLAlembic.FileCopy( + Path( + Path(__file__).resolve().parent, BLAlembic.DEFAULT_CONFIG_NAME + ), + Path(Path.cwd(), BLAlembic.DEFAULT_CONFIG_NAME), ) - temp_config_file.writelines(default_config_file.buffer) + ] + ) - # the file will not be read correctly - # without seeking to the 0th byte - _ = temp_config_file.seek(0) + def _copy_files(self, files: list[FileCopy], force_overwrite: bool = False): + for file in files: + write_mode = "w+b" if self._allow_overwrite or force_overwrite else "x+b" + try: + with ( + open(file.source, "r") as source, + open(file.destination, write_mode) as destination, + ): + destination.writelines(source.buffer) + except FileExistsError as e: + if e.filename != str(file.destination): + raise - # yield so the temp file isn't deleted - yield temp_config_file + self._log.warn( + f"""The file '{file.destination}' already exists, but this is unexpected. Refusing to overwrite. + To use the default configuration, delete the existing file, + or set the envvar `{BLAlembic.ALLOW_OVERWRITE_NAME}=True`.""" + ) def _alembic_msg_capture( self, @@ -346,7 +361,9 @@ def run(self) -> None: def bl_alembic( - argv: list[str] | None = None, log_level: int | str | None = None + argv: list[str] | None = None, + log_level: int | str | None = None, + allow_overwrite: bool | None = None, ) -> None: """ A method to support the `bl-alembic` command, which replaces `alembic. @@ -356,13 +373,20 @@ def bl_alembic( """ logging.basicConfig(level=logging.INFO) if not log_level: - log_level = environ.get("LOG_LEVEL") + log_level = environ.get(BLAlembic.LOG_LEVEL_NAME) log_level = int(log_level) if log_level else logging.INFO logger = logging.getLogger() logger.setLevel(log_level) - bla = BLAlembic(argv, logger) + if allow_overwrite is None: + _allow_overwrite = environ.get(BLAlembic.ALLOW_OVERWRITE_NAME) + allow_overwrite = (_allow_overwrite.lower() if _allow_overwrite else None) in [ + "true", + "1", + ] + + bla = BLAlembic(argv, logger, allow_overwrite) bla.run() From ad9a23a94a12b69ab70ad8bb2f9ff016f7437a9c Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Fri, 22 Mar 2024 15:06:21 -0700 Subject: [PATCH 30/55] Refactor BLAlembic into its own class. 
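Before the refactor below, a note on the copy helper introduced in the previous patch: it relies on Python's exclusive-create mode ("x"), so existing files are never clobbered unless overwriting is explicitly requested. A simplified standalone version, without the class plumbing, logging, or the force/ALLOW_OVERWRITE switches:

from pathlib import Path


def copy_file(source: Path, destination: Path, allow_overwrite: bool = False) -> bool:
    """Copy `source` to `destination`, refusing to clobber an existing file unless allowed."""
    # "x" is exclusive-create mode: open() raises FileExistsError if the file already exists.
    mode = "wb" if allow_overwrite else "xb"
    try:
        with open(source, "rb") as src, open(destination, mode) as dst:
            dst.write(src.read())
        return True
    except FileExistsError:
        # Leave the existing file untouched and report that nothing was written.
        return False

The real `_copy_files` additionally re-raises a `FileExistsError` whose filename is not the destination, since that indicates an unrelated failure rather than a refusal to overwrite.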
--- .../database/migrations/alembic/__main__.py | 357 +---------------- .../database/migrations/alembic/bl_alembic.py | 359 ++++++++++++++++++ 2 files changed, 360 insertions(+), 356 deletions(-) create mode 100644 src/database/BL_Python/database/migrations/alembic/bl_alembic.py diff --git a/src/database/BL_Python/database/migrations/alembic/__main__.py b/src/database/BL_Python/database/migrations/alembic/__main__.py index d6f50ae8..7de86406 100644 --- a/src/database/BL_Python/database/migrations/alembic/__main__.py +++ b/src/database/BL_Python/database/migrations/alembic/__main__.py @@ -1,363 +1,8 @@ import logging -import sys -from logging import Logger from os import environ -from pathlib import Path -from types import TracebackType -from typing import Callable - -import alembic.util.messaging # this is Alembic's main entry point -from alembic.config import CommandLine, Config -from alembic.config import main as alembic_main -from attr import dataclass -from typing_extensions import final - - -@final -class BLAlembic: - DEFAULT_CONFIG_NAME: str = "alembic.ini" - LOG_LEVEL_NAME: str = "LOG_LEVEL" - ALLOW_OVERWRITE_NAME: str = "ALLOW_OVERWRITE" - - _run: Callable[[], None] - _log: Logger - _allow_overwrite: bool = False - - @dataclass - class FileCopy: - source: Path - destination: Path - - def __init__( - self, argv: list[str] | None, logger: Logger, allow_overwrite: bool = False - ) -> None: - """ - _summary_ - - :param list[str] | None argv: The command line arguments to be parsed by ArgumentParser. - If None, this will use `sys.argv[1:]` to use CLI arguments. - :param Logger logger: A logger for writing messages. - """ - self._log = logger - self._allow_overwrite = allow_overwrite - - if not argv: - argv = sys.argv[1:] - - args = set(argv) - - if not args or "-h" in args or "--help" in args: - self._log.debug(f"Empty or 'help' args passed from Alembic: {args}") - self._run = lambda: self._run_with_alembic_defaults(argv) - elif "-c" in args or "--config" in args: - self._log.debug(f"'config' args passed from Alembic: {args}") - self._run = lambda: self._run_with_specified_config(argv) - else: - self._log.debug(f"Execution-only args passed from Alembic: {args}") - self._run = lambda: self._run_with_config(argv) - - def _get_config(self, argv: list[str]) -> Config: - """ - Get a parsed Alembic INI file as a `Config` object. - - :param list[str] argv: The command line arguments to be parsed by ArgumentParser. - :return Config: The `Config` object with options set from an INI file. - """ - # needs to open the config and return it - # so we can get the alembic migration directory - alembic_cli = CommandLine() - parsed_args = alembic_cli.parser.parse_args(argv) - self._log.debug(f"Parsed arguments: {parsed_args}") - config = Config(parsed_args.config) - self._log.debug(f"Instantiated config: {repr(config)}") - return config - - def _run_with_alembic_defaults(self, argv: list[str]) -> None: - """ - Calls `alembic` programmatically. - - Used when no command line arguments, or `-h` or `--help`, are specified. - - :param list[str] argv: The command line arguments to be parsed by ArgumentParser. - :return None: - """ - self._log.debug("Running unmodified `alembic` command.") - return alembic_main(argv) - - def _run_with_specified_config(self, argv: list[str]) -> None: - """ - Calls `alembic` programmatically. - - Used when `-c` or `--config` are specified. - - :param list[str] argv: The command line arguments to be parsed by ArgumentParser. 
- :return None: - """ - self._log.debug("Running unmodified `alembic` command.") - self._execute_alembic(argv) - - def _run_with_config(self, argv: list[str]) -> None: - """ - Calls `alembic` programmatically either: - - if the file 'alembic.ini' exists in the same working - directory in which the command is run. - - if the file 'alembic.ini' does not exist and after creating - a temporary configuration file from the BL_Python default Alembic - config, and forcing the temporary configuration file to be used - by `alembic`. - - :param list[str] argv: The command line arguments to be parsed by ArgumentParser. - :return None: - """ - # if not Path(BLAlembic.DEFAULT_CONFIG_NAME).exists(): - self._log.debug("Running `alembic` with modified command.") - self._write_bl_alembic_config() - argv = ["-c", BLAlembic.DEFAULT_CONFIG_NAME] + argv - # else: - # self._log.debug("Running `alembic` with discovered configuration file.") - - self._execute_alembic(argv) - - def _execute_alembic(self, argv: list[str]) -> None: - """ - Programmatically run `alembic`. - - :param list[str] argv: The command line arguments to be parsed by ArgumentParser. - :return None: - """ - config = self._get_config(argv) - - with self._initialize_alembic(config) as msg_capture: - try: - return alembic_main(argv) - except SystemExit as e: - self._log.error(e) - # If SystemExit is from anything other than - # needing to create the init dir, then crash. - # This is doable/reliable because Alembic first writes - # a message that the directory needs to be created, - # then calls `sys.exit(-1)`. - if not msg_capture.seen: - raise - - self._log.debug( - f"The Alembic initialization error was seen. Ignoring `{SystemExit.__name__}` exception." - ) - - def _initialize_alembic(self, config: Config): - """ - Set up Alembic to run `alembic init` programmatically if it is needed. - - :param Config config: The config, parsed from an Alembic INI configuration file. - :return MsgCaptureCtxManager: A type indicating whether an expected message was - written by Alembic. In the case of this method, if the "use the 'init'" - message is seen, then `alembic init` is executed. This type can be used to - determine whether `alembic init` was executed. - :return MsgCaptureCtxManager: - """ - script_location = config.get_main_option("script_location") or "alembic" - - def _msg_new(msg: Callable[[str, bool, bool, bool], None]): - nonlocal script_location - self._log.debug("Executing `alembic init`.") - msg( - "'alembic' migration directory does not exist. Creating it.", - # these bool values are defaults for Alembic msg function - True, - False, - False, - ) - alembic_main(["init", script_location]) - - self._overwrite_alembic_env_files(config) - - return self._alembic_msg_capture( - "use the 'init' command to create a new scripts folder", _msg_new - ) - - def _overwrite_alembic_env_files(self, config: Config) -> None: - """ - Overwrite env.py and env_setup.py in an Alembic migrations directory. - Currently this only runs if `alembic init` is executed, and care must - be taken if we intend to change this to overwrite the files if they exist. - The files will exist if `alembic init` was executed prior to this tool. - - :param Config config: The config, parsed from an Alembic INI configuration file. 
- :return None: - """ - script_location = config.get_main_option("script_location") or "alembic" - bl_python_alembic_file_dir = Path(__file__).resolve().parent - - files = [ - BLAlembic.FileCopy( - Path(bl_python_alembic_file_dir, f"_replacement_{basename}.py"), - Path(Path.cwd(), Path(script_location, f"{basename}.py")), - ) - for basename in ["env", "env_setup"] - ] - - self._log.debug(f"Rewriting base Alembic files: '{files}'") - # force the overwrite because Alembic creates the - # files that we want to replace. - self._copy_files(files, force_overwrite=True) - - def _write_bl_alembic_config( - self, - ) -> None: - """ - Write the BL_Python Alembic tool's default configuration file to a temp file. - - :yield Generator[tempfile._TemporaryFileWrapper[bytes], Any, None]: The temp file. - """ - # need to _not_ use a temp file, and copy the default alembic.ini - # to the directory in which bl-alembic is executed. - self._log.debug( - f"Writing configuration file '{BLAlembic.DEFAULT_CONFIG_NAME}'." - ) - self._copy_files( - [ - BLAlembic.FileCopy( - Path( - Path(__file__).resolve().parent, BLAlembic.DEFAULT_CONFIG_NAME - ), - Path(Path.cwd(), BLAlembic.DEFAULT_CONFIG_NAME), - ) - ] - ) - - def _copy_files(self, files: list[FileCopy], force_overwrite: bool = False): - for file in files: - write_mode = "w+b" if self._allow_overwrite or force_overwrite else "x+b" - try: - with ( - open(file.source, "r") as source, - open(file.destination, write_mode) as destination, - ): - destination.writelines(source.buffer) - except FileExistsError as e: - if e.filename != str(file.destination): - raise - - self._log.warn( - f"""The file '{file.destination}' already exists, but this is unexpected. Refusing to overwrite. - To use the default configuration, delete the existing file, - or set the envvar `{BLAlembic.ALLOW_OVERWRITE_NAME}=True`.""" - ) - - def _alembic_msg_capture( - self, - msg_to_capture: str, - callback: Callable[[Callable[[str, bool, bool, bool], None]], None], - ): - """ - Capture a specific message written by Alembic, and call `callback` if it matches. - - This method override's Alembic's `msg` function and restores it when the - context is closed. - - :param str msg_to_capture: The specific message to monitor in Alembic's writes. - :param Callable[[Callable[[str, bool, bool, bool], None]], None] callback: - A callable that receives Alembic's `msg` function as a parameter. - :return MsgCaptureCtxManager: - """ - - OVERRIDDEN_ORIGINAL_ATTR_NAME = "_overridden_original" - if hasattr(alembic.util.messaging.msg, OVERRIDDEN_ORIGINAL_ATTR_NAME): - # if the attr exists that means we have already overriden it, - # so we set `_msg_original` to the real original. - self._log.debug( - f"`alembic.util.messaging.msg` has already been overwritten. Using `{OVERRIDDEN_ORIGINAL_ATTR_NAME}` attribute to get the original method." - ) - _msg_original = getattr( - alembic.util.messaging.msg, OVERRIDDEN_ORIGINAL_ATTR_NAME - ) - else: - self._log.debug( - f"`alembic.util.messaging.msg` has not been overridden. Using it as the original method." - ) - # if the attr does not exist, then we assume `msg` is - # the original Alembic `msg` function. 
- _msg_original: Callable[[str, bool, bool, bool], None] = ( - alembic.util.messaging.msg - ) - - @dataclass - class MessageSeen: - seen: bool = False - - @final - class MsgCaptureCtxManager: - _msg_seen: MessageSeen = MessageSeen() - _log: Logger - - def __init__(self, logger: Logger) -> None: - self._log = logger - - def __enter__(self) -> MessageSeen: - """ - Replace Alembic's `msg` function in order to execute - a callback when certain messages are seen. - - :return _type_: _description_ - """ - self._log.debug(f"Entering `{MsgCaptureCtxManager.__name__}` context.") - - def _msg_new( - msg: str, - newline: bool = True, - flush: bool = False, - quiet: bool = False, - ): - if msg_to_capture in msg: - self._log.debug( - f"The msg '{msg_to_capture}' was written by Alembic." - ) - callback(_msg_original) - self._msg_seen.seen = True - else: - _msg_original(msg, newline, flush, quiet) - - setattr( - _msg_new, OVERRIDDEN_ORIGINAL_ATTR_NAME, alembic.util.messaging.msg - ) - - self._log.debug( - f"Overwritting `alembic.util.messaging.msg` with `{repr(_msg_new)}`." - ) - alembic.util.messaging.msg = _msg_new - - return self._msg_seen - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool: - """ - Revert replacing Alembic's `msg` function by restoring the original. - - :param type[BaseException] | None exc_type: - :param BaseException | None exc_val: - :param TracebackType | None exc_tb: - :return bool: - """ - self._log.debug(f"Exiting `{MsgCaptureCtxManager.__name__}` context.") - alembic.util.messaging.msg = _msg_original - return True - - return MsgCaptureCtxManager(self._log) - - def run(self) -> None: - """ - Run Alembic migrations, initializing Alembic if necessary. - - :return None: - """ - self._log.debug("Bootstrapping and executing `alembic` process.") - return self._run() +from .bl_alembic import BLAlembic def bl_alembic( diff --git a/src/database/BL_Python/database/migrations/alembic/bl_alembic.py b/src/database/BL_Python/database/migrations/alembic/bl_alembic.py new file mode 100644 index 00000000..deaab11e --- /dev/null +++ b/src/database/BL_Python/database/migrations/alembic/bl_alembic.py @@ -0,0 +1,359 @@ +import sys +from logging import Logger +from pathlib import Path +from types import TracebackType +from typing import Callable + +import alembic.util.messaging + +# this is Alembic's main entry point +from alembic.config import CommandLine, Config +from alembic.config import main as alembic_main +from attr import dataclass +from typing_extensions import final + + +@final +class BLAlembic: + DEFAULT_CONFIG_NAME: str = "alembic.ini" + LOG_LEVEL_NAME: str = "LOG_LEVEL" + ALLOW_OVERWRITE_NAME: str = "ALLOW_OVERWRITE" + + _run: Callable[[], None] + _log: Logger + _allow_overwrite: bool = False + + @dataclass + class FileCopy: + source: Path + destination: Path + + def __init__( + self, argv: list[str] | None, logger: Logger, allow_overwrite: bool = False + ) -> None: + """ + _summary_ + + :param list[str] | None argv: The command line arguments to be parsed by ArgumentParser. + If None, this will use `sys.argv[1:]` to use CLI arguments. + :param Logger logger: A logger for writing messages. 
+ """ + self._log = logger + self._allow_overwrite = allow_overwrite + + if not argv: + argv = sys.argv[1:] + + args = set(argv) + + if not args or "-h" in args or "--help" in args: + self._log.debug(f"Empty or 'help' args passed from Alembic: {args}") + self._run = lambda: self._run_with_alembic_defaults(argv) + elif "-c" in args or "--config" in args: + self._log.debug(f"'config' args passed from Alembic: {args}") + self._run = lambda: self._run_with_specified_config(argv) + else: + self._log.debug(f"Execution-only args passed from Alembic: {args}") + self._run = lambda: self._run_with_config(argv) + + def _get_config(self, argv: list[str]) -> Config: + """ + Get a parsed Alembic INI file as a `Config` object. + + :param list[str] argv: The command line arguments to be parsed by ArgumentParser. + :return Config: The `Config` object with options set from an INI file. + """ + # needs to open the config and return it + # so we can get the alembic migration directory + alembic_cli = CommandLine() + parsed_args = alembic_cli.parser.parse_args(argv) + self._log.debug(f"Parsed arguments: {parsed_args}") + config = Config(parsed_args.config) + self._log.debug(f"Instantiated config: {repr(config)}") + return config + + def _run_with_alembic_defaults(self, argv: list[str]) -> None: + """ + Calls `alembic` programmatically. + + Used when no command line arguments, or `-h` or `--help`, are specified. + + :param list[str] argv: The command line arguments to be parsed by ArgumentParser. + :return None: + """ + self._log.debug("Running unmodified `alembic` command.") + return alembic_main(argv) + + def _run_with_specified_config(self, argv: list[str]) -> None: + """ + Calls `alembic` programmatically. + + Used when `-c` or `--config` are specified. + + :param list[str] argv: The command line arguments to be parsed by ArgumentParser. + :return None: + """ + self._log.debug("Running unmodified `alembic` command.") + self._execute_alembic(argv) + + def _run_with_config(self, argv: list[str]) -> None: + """ + Calls `alembic` programmatically either: + - if the file 'alembic.ini' exists in the same working + directory in which the command is run. + - if the file 'alembic.ini' does not exist and after creating + a temporary configuration file from the BL_Python default Alembic + config, and forcing the temporary configuration file to be used + by `alembic`. + + :param list[str] argv: The command line arguments to be parsed by ArgumentParser. + :return None: + """ + # if not Path(BLAlembic.DEFAULT_CONFIG_NAME).exists(): + self._log.debug("Running `alembic` with modified command.") + self._write_bl_alembic_config() + argv = ["-c", BLAlembic.DEFAULT_CONFIG_NAME] + argv + # else: + # self._log.debug("Running `alembic` with discovered configuration file.") + + self._execute_alembic(argv) + + def _execute_alembic(self, argv: list[str]) -> None: + """ + Programmatically run `alembic`. + + :param list[str] argv: The command line arguments to be parsed by ArgumentParser. + :return None: + """ + config = self._get_config(argv) + + with self._initialize_alembic(config) as msg_capture: + try: + return alembic_main(argv) + except SystemExit as e: + self._log.error(e) + # If SystemExit is from anything other than + # needing to create the init dir, then crash. + # This is doable/reliable because Alembic first writes + # a message that the directory needs to be created, + # then calls `sys.exit(-1)`. + if not msg_capture.seen: + raise + + self._log.debug( + f"The Alembic initialization error was seen. 
Ignoring `{SystemExit.__name__}` exception." + ) + + def _initialize_alembic(self, config: Config): + """ + Set up Alembic to run `alembic init` programmatically if it is needed. + + :param Config config: The config, parsed from an Alembic INI configuration file. + :return MsgCaptureCtxManager: A type indicating whether an expected message was + written by Alembic. In the case of this method, if the "use the 'init'" + message is seen, then `alembic init` is executed. This type can be used to + determine whether `alembic init` was executed. + :return MsgCaptureCtxManager: + """ + script_location = config.get_main_option("script_location") or "alembic" + + def _msg_new(msg: Callable[[str, bool, bool, bool], None]): + nonlocal script_location + self._log.debug("Executing `alembic init`.") + msg( + "'alembic' migration directory does not exist. Creating it.", + # these bool values are defaults for Alembic msg function + True, + False, + False, + ) + alembic_main(["init", script_location]) + + self._overwrite_alembic_env_files(config) + + return self._alembic_msg_capture( + "use the 'init' command to create a new scripts folder", _msg_new + ) + + def _overwrite_alembic_env_files(self, config: Config) -> None: + """ + Overwrite env.py and env_setup.py in an Alembic migrations directory. + Currently this only runs if `alembic init` is executed, and care must + be taken if we intend to change this to overwrite the files if they exist. + The files will exist if `alembic init` was executed prior to this tool. + + :param Config config: The config, parsed from an Alembic INI configuration file. + :return None: + """ + script_location = config.get_main_option("script_location") or "alembic" + bl_python_alembic_file_dir = Path(__file__).resolve().parent + + files = [ + BLAlembic.FileCopy( + Path(bl_python_alembic_file_dir, f"_replacement_{basename}.py"), + Path(Path.cwd(), Path(script_location, f"{basename}.py")), + ) + for basename in ["env", "env_setup"] + ] + + self._log.debug(f"Rewriting base Alembic files: '{files}'") + # force the overwrite because Alembic creates the + # files that we want to replace. + self._copy_files(files, force_overwrite=True) + + def _write_bl_alembic_config( + self, + ) -> None: + """ + Write the BL_Python Alembic tool's default configuration file to a temp file. + + :yield Generator[tempfile._TemporaryFileWrapper[bytes], Any, None]: The temp file. + """ + # need to _not_ use a temp file, and copy the default alembic.ini + # to the directory in which bl-alembic is executed. + self._log.debug( + f"Writing configuration file '{BLAlembic.DEFAULT_CONFIG_NAME}'." + ) + self._copy_files( + [ + BLAlembic.FileCopy( + Path( + Path(__file__).resolve().parent, BLAlembic.DEFAULT_CONFIG_NAME + ), + Path(Path.cwd(), BLAlembic.DEFAULT_CONFIG_NAME), + ) + ] + ) + + def _copy_files(self, files: list[FileCopy], force_overwrite: bool = False): + for file in files: + write_mode = "w+b" if self._allow_overwrite or force_overwrite else "x+b" + try: + with ( + open(file.source, "r") as source, + open(file.destination, write_mode) as destination, + ): + destination.writelines(source.buffer) + except FileExistsError as e: + if e.filename != str(file.destination): + raise + + self._log.warn( + f"""The file '{file.destination}' already exists, but this is unexpected. Refusing to overwrite. 
+ To use the default configuration, delete the existing file, + or set the envvar `{BLAlembic.ALLOW_OVERWRITE_NAME}=True`.""" + ) + + def _alembic_msg_capture( + self, + msg_to_capture: str, + callback: Callable[[Callable[[str, bool, bool, bool], None]], None], + ): + """ + Capture a specific message written by Alembic, and call `callback` if it matches. + + This method override's Alembic's `msg` function and restores it when the + context is closed. + + :param str msg_to_capture: The specific message to monitor in Alembic's writes. + :param Callable[[Callable[[str, bool, bool, bool], None]], None] callback: + A callable that receives Alembic's `msg` function as a parameter. + :return MsgCaptureCtxManager: + """ + + OVERRIDDEN_ORIGINAL_ATTR_NAME = "_overridden_original" + if hasattr(alembic.util.messaging.msg, OVERRIDDEN_ORIGINAL_ATTR_NAME): + # if the attr exists that means we have already overriden it, + # so we set `_msg_original` to the real original. + self._log.debug( + f"`alembic.util.messaging.msg` has already been overwritten. Using `{OVERRIDDEN_ORIGINAL_ATTR_NAME}` attribute to get the original method." + ) + _msg_original = getattr( + alembic.util.messaging.msg, OVERRIDDEN_ORIGINAL_ATTR_NAME + ) + else: + self._log.debug( + f"`alembic.util.messaging.msg` has not been overridden. Using it as the original method." + ) + # if the attr does not exist, then we assume `msg` is + # the original Alembic `msg` function. + _msg_original: Callable[[str, bool, bool, bool], None] = ( + alembic.util.messaging.msg + ) + + @dataclass + class MessageSeen: + seen: bool = False + + @final + class MsgCaptureCtxManager: + _msg_seen: MessageSeen = MessageSeen() + _log: Logger + + def __init__(self, logger: Logger) -> None: + self._log = logger + + def __enter__(self) -> MessageSeen: + """ + Replace Alembic's `msg` function in order to execute + a callback when certain messages are seen. + + :return _type_: _description_ + """ + self._log.debug(f"Entering `{MsgCaptureCtxManager.__name__}` context.") + + def _msg_new( + msg: str, + newline: bool = True, + flush: bool = False, + quiet: bool = False, + ): + if msg_to_capture in msg: + self._log.debug( + f"The msg '{msg_to_capture}' was written by Alembic." + ) + callback(_msg_original) + self._msg_seen.seen = True + else: + _msg_original(msg, newline, flush, quiet) + + setattr( + _msg_new, OVERRIDDEN_ORIGINAL_ATTR_NAME, alembic.util.messaging.msg + ) + + self._log.debug( + f"Overwritting `alembic.util.messaging.msg` with `{repr(_msg_new)}`." + ) + alembic.util.messaging.msg = _msg_new + + return self._msg_seen + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool: + """ + Revert replacing Alembic's `msg` function by restoring the original. + + :param type[BaseException] | None exc_type: + :param BaseException | None exc_val: + :param TracebackType | None exc_tb: + :return bool: + """ + self._log.debug(f"Exiting `{MsgCaptureCtxManager.__name__}` context.") + alembic.util.messaging.msg = _msg_original + return True + + return MsgCaptureCtxManager(self._log) + + def run(self) -> None: + """ + Run Alembic migrations, initializing Alembic if necessary. 
+ + :return None: + """ + self._log.debug("Bootstrapping and executing `alembic` process.") + # FIXME this ends up logging ERROR:root:-1 in some cases + return self._run() From 820e69aa07de41495f8e5f07087b6c5032772590 Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Fri, 22 Mar 2024 15:10:20 -0700 Subject: [PATCH 31/55] Remove commented code. --- .../BL_Python/database/migrations/alembic/bl_alembic.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/database/BL_Python/database/migrations/alembic/bl_alembic.py b/src/database/BL_Python/database/migrations/alembic/bl_alembic.py index deaab11e..896c0ef1 100644 --- a/src/database/BL_Python/database/migrations/alembic/bl_alembic.py +++ b/src/database/BL_Python/database/migrations/alembic/bl_alembic.py @@ -109,12 +109,9 @@ def _run_with_config(self, argv: list[str]) -> None: :param list[str] argv: The command line arguments to be parsed by ArgumentParser. :return None: """ - # if not Path(BLAlembic.DEFAULT_CONFIG_NAME).exists(): self._log.debug("Running `alembic` with modified command.") self._write_bl_alembic_config() argv = ["-c", BLAlembic.DEFAULT_CONFIG_NAME] + argv - # else: - # self._log.debug("Running `alembic` with discovered configuration file.") self._execute_alembic(argv) From 60dd4befd79e829eac2fd478bec970c2732c897a Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Fri, 22 Mar 2024 17:02:22 -0700 Subject: [PATCH 32/55] Add some tests for BLAlembic. --- .../migrations/alembic/test_bl_alembic.py | 193 ++++++++++++++++++ 1 file changed, 193 insertions(+) create mode 100644 src/database/test/unit/migrations/alembic/test_bl_alembic.py diff --git a/src/database/test/unit/migrations/alembic/test_bl_alembic.py b/src/database/test/unit/migrations/alembic/test_bl_alembic.py new file mode 100644 index 00000000..fd5dece6 --- /dev/null +++ b/src/database/test/unit/migrations/alembic/test_bl_alembic.py @@ -0,0 +1,193 @@ +from typing import Any, Generator, Protocol +from unittest.mock import AsyncMock, MagicMock, NonCallableMagicMock + +import alembic +import alembic.util +import pytest +from BL_Python.database.migrations.alembic.bl_alembic import BLAlembic +from mock import MagicMock +from pytest_mock import MockerFixture +from pytest_mock.plugin import MockType + + +class MockArgv(Protocol): + def __call__(self, args: list[str]) -> MockType: ... 
+ + +@pytest.fixture +def mock_argv(mocker: MockerFixture) -> Generator[MockArgv, Any, None]: + argv = mocker.patch("BL_Python.database.migrations.alembic.bl_alembic.sys.argv") + + def set_args(args: list[str]): + argv.__getitem__ = ( + lambda _argv, _slice: (["bl-alembic"] + args)[_slice] # pyright: ignore[reportUnknownLambdaType] + ) + return argv + + yield set_args + + +def mock_alembic(mocker: MockerFixture): + return mocker.patch("BL_Python.database.migrations.alembic.bl_alembic.alembic_main") + + +def mock_config(mocker: MockerFixture): + return mocker.patch("BL_Python.database.migrations.alembic.bl_alembic.Config") + + +@pytest.mark.parametrize( + "args", + [ + [], + ["-h"], + ["--help"], + ["-c", "test-config.ini"], + ["--config", "test-config.ini"], + ["-c", "alembic.ini", "upgrade", "head"], + ], +) +def test__BLAlembic__passes_through_to_alembic_with_correct_args( + args: list[str], mock_argv: MockArgv, mocker: MockerFixture +): + _ = mock_argv(args) + _ = mocker.patch("BL_Python.database.migrations.alembic.bl_alembic.Config") + alembic_main = mocker.patch( + "BL_Python.database.migrations.alembic.bl_alembic.alembic_main" + ) + + bl_alembic = BLAlembic(None, MagicMock()) + bl_alembic.run() + + assert alembic_main.called + alembic_main.assert_called_once_with(args) + + +def test__BLAlembic__passes_through_to_alembic_with_default_config_when_not_specified( + mock_argv: MockArgv, + mocker: MockerFixture, +): + args = ["upgrade", "head"] + _ = mock_argv(args) + _ = mocker.patch("BL_Python.database.migrations.alembic.bl_alembic.Config") + alembic_main = mocker.patch( + "BL_Python.database.migrations.alembic.bl_alembic.alembic_main" + ) + + bl_alembic = BLAlembic(None, MagicMock()) + bl_alembic.run() + + assert alembic_main.called + alembic_main.assert_called_once_with(["-c", BLAlembic.DEFAULT_CONFIG_NAME] + args) + + +def test__BLAlembic__creates_default_config(mock_argv: MockArgv, mocker: MockerFixture): + _ = mock_alembic(mocker) + _ = mock_config(mocker) + _ = mock_argv(["upgrade", "head"]) + + def path_se(*args: Any, **kwargs: Any): + # set the call args for the Path mocks that are passed + # into the FileCopy mock so we can examine them when FileCopy + # is called + return MagicMock(args=args) + + def file_copy_se(*args: Any, **kwargs: Any): + # set a mocked FileCopy whose src/dest are strings (filenames) + return MagicMock(source=args[0].args[1], destination=args[1].args[1]) + + _ = mocker.patch( + "BL_Python.database.migrations.alembic.bl_alembic.Path", side_effect=path_se + ) + _ = mocker.patch( + "BL_Python.database.migrations.alembic.bl_alembic.BLAlembic.FileCopy", + side_effect=file_copy_se, + ) + open_mock = mocker.patch("builtins.open", mocker.mock_open()) + + bl_alembic = BLAlembic(None, MagicMock()) + bl_alembic.run() + + assert open_mock.called + call_args = [call[0] for call in open_mock.call_args_list] + assert (BLAlembic.DEFAULT_CONFIG_NAME, "r") in call_args + assert (BLAlembic.DEFAULT_CONFIG_NAME, "x+b") in call_args + + +def test__BLAlembic__does_not_overwrite_existing_config( + mock_argv: MockArgv, mocker: MockerFixture +): + _ = mock_alembic(mocker) + _ = mock_argv(["upgrade", "head"]) + + _ = mocker.patch("BL_Python.database.migrations.alembic.bl_alembic.Path") + open_mock = mocker.patch("builtins.open", mocker.mock_open()) + + def raise_file_exists_error(*args: Any, **kwargs: Any): + raise FileExistsError() + + open_mock.side_effect = raise_file_exists_error + + with pytest.raises(FileExistsError): + bl_alembic = BLAlembic(None, MagicMock()) + 
bl_alembic.run() + + +def test__BLAlembic__initializes_alembic_if_not_already_initialized( + mock_argv: MockArgv, mocker: MockerFixture +): + _ = mock_argv(["upgrade", "head"]) + + _ = mocker.patch("BL_Python.database.migrations.alembic.bl_alembic.Path") + _ = mocker.patch("builtins.open", mocker.mock_open()) + + _ = mock_config(mocker) + _mock_alembic = mock_alembic(mocker) + + def write_init_message(*args: Any, **kwargs: Any): + _mock_alembic.side_effect = None + alembic.util.messaging.msg( + "use the 'init' command to create a new scripts folder" + ) + + _mock_alembic.side_effect = write_init_message + + bl_alembic = BLAlembic(None, MagicMock()) + bl_alembic.run() + + assert "init" in [call[0][0][0] for call in _mock_alembic.call_args_list] + + +def test__BLAlembic__initializes_alembic_into_correct_directory_if_not_already_initialized( + mock_argv: MockArgv, mocker: MockerFixture +): + _ = mock_argv(["upgrade", "head"]) + + _ = mocker.patch("BL_Python.database.migrations.alembic.bl_alembic.Path") + _ = mocker.patch("builtins.open", mocker.mock_open()) + _mock_alembic = mock_alembic(mocker) + + # get_main_option_mock = MagicMock() + def get_main_option(option: str): + if option == "script_location": + return "migrations/" + return MagicMock() + + _ = mocker.patch( + "BL_Python.database.migrations.alembic.bl_alembic.Config", + return_value=MagicMock(get_main_option=get_main_option), + ) + + def write_init_message(*args: Any, **kwargs: Any): + _mock_alembic.side_effect = None + alembic.util.messaging.msg( + "use the 'init' command to create a new scripts folder" + ) + + _mock_alembic.side_effect = write_init_message + + bl_alembic = BLAlembic(None, MagicMock()) + bl_alembic.run() + + assert ["init", "migrations/"] in [ + call[0][0] for call in _mock_alembic.call_args_list + ] From e1fbb164fb0cbc15b9e5aa4f66d3d7c63cdf8543 Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Fri, 22 Mar 2024 17:13:21 -0700 Subject: [PATCH 33/55] Fix error with injecting wrong Config type. - Also fix the connection string used by Alembic --- src/database/BL_Python/database/migrations/alembic/env.py | 5 +++-- .../BL_Python/database/migrations/alembic/env_setup.py | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/src/database/BL_Python/database/migrations/alembic/env.py b/src/database/BL_Python/database/migrations/alembic/env.py index d0d89399..0a86eac7 100644 --- a/src/database/BL_Python/database/migrations/alembic/env.py +++ b/src/database/BL_Python/database/migrations/alembic/env.py @@ -15,9 +15,10 @@ def run_migrations(bases: list[MetaBase], config_filename: Path | None = None): config_type = ConfigBuilder[Config]().with_root_config(Config).build() config = load_config(config_type, config_filename) - config_module = ConfigModule(config, DatabaseConfig) + config_module = ConfigModule(config, Config) + database_config_module = ConfigModule(config.database, DatabaseConfig) - ioc_container = Injector(config_module) + ioc_container = Injector([config_module, database_config_module]) alembic_env = ioc_container.create_object(AlembicEnvSetup) diff --git a/src/database/BL_Python/database/migrations/alembic/env_setup.py b/src/database/BL_Python/database/migrations/alembic/env_setup.py index 751049a2..90897d84 100644 --- a/src/database/BL_Python/database/migrations/alembic/env_setup.py +++ b/src/database/BL_Python/database/migrations/alembic/env_setup.py @@ -86,7 +86,7 @@ def get_config(self): # FIXME why is this here? 
config.set_main_option( "sqlalchemy.url", - "sqlite:///.app.db", + self._config.connection_string ) return config From 6ec57f9401a2dc1a21bc6697df968bc9d5d78361 Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Fri, 22 Mar 2024 17:17:52 -0700 Subject: [PATCH 34/55] Fix type errors. --- src/database/test/unit/migrations/alembic/test_bl_alembic.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/database/test/unit/migrations/alembic/test_bl_alembic.py b/src/database/test/unit/migrations/alembic/test_bl_alembic.py index fd5dece6..0bcef6de 100644 --- a/src/database/test/unit/migrations/alembic/test_bl_alembic.py +++ b/src/database/test/unit/migrations/alembic/test_bl_alembic.py @@ -1,5 +1,5 @@ from typing import Any, Generator, Protocol -from unittest.mock import AsyncMock, MagicMock, NonCallableMagicMock +from unittest.mock import MagicMock import alembic import alembic.util From 992ed1215a68143492c576d58a8268040fe7b3eb Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Fri, 22 Mar 2024 17:22:38 -0700 Subject: [PATCH 35/55] Ruff reformat caused by not using `--preview` in VSCode. --- .../database/migrations/alembic/bl_alembic.py | 16 ++++++---------- .../database/migrations/alembic/env_setup.py | 5 +---- 2 files changed, 7 insertions(+), 14 deletions(-) diff --git a/src/database/BL_Python/database/migrations/alembic/bl_alembic.py b/src/database/BL_Python/database/migrations/alembic/bl_alembic.py index 896c0ef1..d5998f8e 100644 --- a/src/database/BL_Python/database/migrations/alembic/bl_alembic.py +++ b/src/database/BL_Python/database/migrations/alembic/bl_alembic.py @@ -211,16 +211,12 @@ def _write_bl_alembic_config( self._log.debug( f"Writing configuration file '{BLAlembic.DEFAULT_CONFIG_NAME}'." ) - self._copy_files( - [ - BLAlembic.FileCopy( - Path( - Path(__file__).resolve().parent, BLAlembic.DEFAULT_CONFIG_NAME - ), - Path(Path.cwd(), BLAlembic.DEFAULT_CONFIG_NAME), - ) - ] - ) + self._copy_files([ + BLAlembic.FileCopy( + Path(Path(__file__).resolve().parent, BLAlembic.DEFAULT_CONFIG_NAME), + Path(Path.cwd(), BLAlembic.DEFAULT_CONFIG_NAME), + ) + ]) def _copy_files(self, files: list[FileCopy], force_overwrite: bool = False): for file in files: diff --git a/src/database/BL_Python/database/migrations/alembic/env_setup.py b/src/database/BL_Python/database/migrations/alembic/env_setup.py index 90897d84..aae1b8ad 100644 --- a/src/database/BL_Python/database/migrations/alembic/env_setup.py +++ b/src/database/BL_Python/database/migrations/alembic/env_setup.py @@ -84,10 +84,7 @@ def get_config(self): fileConfig(config.config_file_name) # FIXME why is this here? - config.set_main_option( - "sqlalchemy.url", - self._config.connection_string - ) + config.set_main_option("sqlalchemy.url", self._config.connection_string) return config From bd5c0a775a77b3f1eb3af70092b54eb4a6df4f51 Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Fri, 22 Mar 2024 17:28:22 -0700 Subject: [PATCH 36/55] Remove bad variable name. 
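With the injector wiring above corrected, a consuming project's generated `env.py` (the `_replacement_env.py` template from an earlier patch, with its TODO filled in) would look roughly like the following. `myapp.models.Base` and `config.toml` are hypothetical stand-ins for whatever declarative base and configuration file the application actually defines:

from pathlib import Path

from BL_Python.database.migrations.alembic.env import run_migrations

# Hypothetical application metadata; substitute the project's own MetaBase type(s).
from myapp.models import Base

run_migrations(bases=[Base], config_filename=Path("config.toml"))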
--- src/database/BL_Python/database/migrations/alembic/__main__.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/database/BL_Python/database/migrations/alembic/__main__.py b/src/database/BL_Python/database/migrations/alembic/__main__.py index 7de86406..61b0a807 100644 --- a/src/database/BL_Python/database/migrations/alembic/__main__.py +++ b/src/database/BL_Python/database/migrations/alembic/__main__.py @@ -31,8 +31,7 @@ def bl_alembic( "1", ] - bla = BLAlembic(argv, logger, allow_overwrite) - bla.run() + BLAlembic(argv, logger, allow_overwrite).run() if __name__ == "__main__": From cc0d2ae0d279031b32f631aeb0bd548905face99 Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Fri, 22 Mar 2024 17:33:01 -0700 Subject: [PATCH 37/55] Add TODO comment for commented code. --- src/database/BL_Python/database/migrations/alembic/env_setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/database/BL_Python/database/migrations/alembic/env_setup.py b/src/database/BL_Python/database/migrations/alembic/env_setup.py index aae1b8ad..7d30c3f5 100644 --- a/src/database/BL_Python/database/migrations/alembic/env_setup.py +++ b/src/database/BL_Python/database/migrations/alembic/env_setup.py @@ -64,6 +64,7 @@ def __init__(self, config: DatabaseConfig) -> None: @lru_cache(maxsize=1) def get_config(self): + # TODO re-integrate AWS SSM at a later time # aws_ssm_config = ConfigParser() # loaded_config_files = aws_ssm_config.read("aws-ssm.ini") # if loaded_config_files: From fb9a13b94d95a05b006908e0a3139ca88eb2ce47 Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Fri, 22 Mar 2024 17:33:14 -0700 Subject: [PATCH 38/55] Fix incorrect string for exception in SQL schema setup. --- src/database/BL_Python/database/schema/postgresql.py | 2 +- src/database/BL_Python/database/schema/sqlite.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/database/BL_Python/database/schema/postgresql.py b/src/database/BL_Python/database/schema/postgresql.py index 5b2a8ff6..3a3609e7 100644 --- a/src/database/BL_Python/database/schema/postgresql.py +++ b/src/database/BL_Python/database/schema/postgresql.py @@ -14,7 +14,7 @@ class PostgreSQLDialect(DialectBase): def __init__(self, dialect: Dialect) -> None: if dialect.name != PostgreSQLDialect.DIALECT_NAME: raise ValueError( - f"Invalid Dialect with name `{dialect.name}` provided for `{PostgreSQLDialect.__name__}`. Expected `sqlite`." + f"Invalid Dialect with name `{dialect.name}` provided for `{PostgreSQLDialect.__name__}`. Expected `{self.DIALECT_NAME}`." ) self._dialect = dialect diff --git a/src/database/BL_Python/database/schema/sqlite.py b/src/database/BL_Python/database/schema/sqlite.py index 6885e52c..f62972ba 100644 --- a/src/database/BL_Python/database/schema/sqlite.py +++ b/src/database/BL_Python/database/schema/sqlite.py @@ -14,7 +14,7 @@ class SQLiteDialect(DialectBase): def __init__(self, dialect: Dialect) -> None: if dialect.name != SQLiteDialect.DIALECT_NAME: raise ValueError( - f"Invalid Dialect with name `{dialect.name}` provided for `{SQLiteDialect.__name__}`. Expected `sqlite`." + f"Invalid Dialect with name `{dialect.name}` provided for `{SQLiteDialect.__name__}`. Expected `{self.DIALECT_NAME}`." ) self._dialect = dialect From b526c3c24a4a3f8b366c016cd1d0994956cd34d2 Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Wed, 27 Mar 2024 14:39:37 -0700 Subject: [PATCH 39/55] Add newline. 
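As a quick illustration of the corrected guard in the dialect patch above, each dialect class now reports its own expected name in the error. A stripped-down version of the check, omitting the real `DialectBase` hierarchy:

class SQLiteDialect:
    DIALECT_NAME = "sqlite"

    def __init__(self, dialect) -> None:
        if dialect.name != SQLiteDialect.DIALECT_NAME:
            raise ValueError(
                f"Invalid Dialect with name `{dialect.name}` provided for "
                f"`{SQLiteDialect.__name__}`. Expected `{self.DIALECT_NAME}`."
            )
        self._dialect = dialect

# Passing a PostgreSQL dialect now reports:
#   Invalid Dialect with name `postgresql` provided for `SQLiteDialect`. Expected `sqlite`.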
--- src/database/MANIFEST.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/database/MANIFEST.in b/src/database/MANIFEST.in index b8efaa18..795e1fd6 100644 --- a/src/database/MANIFEST.in +++ b/src/database/MANIFEST.in @@ -1,3 +1,3 @@ graft BL_Python/database/migrations/alembic/ global-include *.pyi -global-include py.typed \ No newline at end of file +global-include py.typed From 9bd91a0871e4d975e24c0ade0b47d105d66edf3f Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Tue, 26 Mar 2024 12:12:05 -0700 Subject: [PATCH 40/55] Add SAST scanner Bandit. --- .github/workflows/CICD.yaml | 48 +++++++++++++++++++++++++++++++++++++ Makefile | 26 +++++++++++++++++++- pyproject.toml | 18 +++++++++++++- 3 files changed, 90 insertions(+), 2 deletions(-) diff --git a/.github/workflows/CICD.yaml b/.github/workflows/CICD.yaml index 656196fd..696b29b2 100644 --- a/.github/workflows/CICD.yaml +++ b/.github/workflows/CICD.yaml @@ -186,6 +186,53 @@ jobs: retention-days: 1 if-no-files-found: error + Bandit: + name: SAST scanning + runs-on: ubuntu-latest + needs: + - Checkout + + env: + PYTHON_VERSION: "3.10" + + if: ${{( success() && !cancelled() ) }} + + # FIXME Ignore errors while testing Bandit + continue-on-error: true + + steps: + - name: Set up Python ${{ env.PYTHON_VERSION }} + uses: actions/setup-python@v5 + id: install_python + with: + python-version: ${{ env.PYTHON_VERSION }} + + - uses: actions/cache/restore@v4 + name: Restore run cache + id: restore-run-cache + with: + key: ${{ needs.Checkout.outputs.cache-key-run }} + path: ${{ github.workspace }} + fail-on-cache-miss: true + + - name: Run bandit scan and generate reports + run: | + echo Running bandit + + VENV=.github-venv \ + PYTEST_FLAGS="-k 'not acceptance'" \ + make test-bandit + + - name: Output bandit report + uses: actions/upload-artifact@v4 + if: ${{ always() }} + with: + name: pytest-and-coverage-report + path: | + bandit.sarif + retention-days: 1 + if-no-files-found: error + Style: name: Style and formatting runs-on: ubuntu-latest @@ -229,6 +276,7 @@ jobs: - Checkout - Pyright - Pytest + - Bandit - Style # this job should run regardless of success, failure, or skips, # but not if the workflow is cancelled. `always()` ignores cancelled, diff --git a/Makefile b/Makefile index 9245b638..3e293af3 100644 --- a/Makefile +++ b/Makefile @@ -27,10 +27,14 @@ GITHUB_WORKSPACE ?= $(CURDIR) # `testpypi` and `pypi` are valid values. PYPI_REPO ?= testpypi +<<<<<<< HEAD # The directory to write ephermal reports to, # such as pytest coverage reports. REPORTS_DIR ?= reports +======= +BANDIT_REPORT := bandit.sarif +>>>>>>> 0f3e052 (Add SAST scanner Bandit.) # Can be overridden. This is used to change the prereqs # of some supporting targets, like `format-ruff`. @@ -204,6 +208,17 @@ test-pyright : $(VENV) $(DEFAULT_TARGET) endif endif +test-bandit : $(VENV) $(DEFAULT_TARGET) + $(ACTIVATE_VENV) + + bandit -c pyproject.toml \ + --format sarif \ + --output $(BANDIT_REPORT) \ + -r . || BANDIT_EXIT_CODE=$$? + # don't exit with an error + # while testing bandit. 
+ echo "Bandit exit code: $$BANDIT_EXIT_CODE" + test-pytest : $(VENV) $(DEFAULT_TARGET) $(ACTIVATE_VENV) @@ -217,7 +232,7 @@ test-pytest : $(VENV) $(DEFAULT_TARGET) exit $$PYTEST_EXIT_CODE .PHONY: test test-pytest test-pyright test-ruff test-isort -_test : $(VENV) $(DEFAULT_TARGET) test-isort test-ruff test-pyright test-pytest +_test : $(VENV) $(DEFAULT_TARGET) test-isort test-ruff test-pyright test-bandit test-pytest test : CMD_PREFIX=@ test : clean-test $(MAKE) -j --keep-going _test @@ -247,8 +262,17 @@ clean-build : \) -prune -exec rm -rf {} \; clean-test : +<<<<<<< HEAD $(CMD_PREFIX)rm -rf \ $(REPORTS_DIR)/pytest +======= + $(CMD_PREFIX)rm -rf cov.xml \ + pytest.xml \ + coverage \ + .coverage \ + $(BANDIT_REPORT) + +>>>>>>> 0f3e052 (Add SAST scanner Bandit.) .PHONY: clean clean-test clean-build clean : clean-build clean-test diff --git a/pyproject.toml b/pyproject.toml index b57e4611..6ad06880 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -87,7 +87,8 @@ dev-dependencies = [ "junit2html ~= 30.1", "pyright ~= 1.1", "isort ~= 5.13", - "ruff ~= 0.3" + "ruff ~= 0.3", + "bandit[sarif,toml] ~= 1.7" ] [tool.pyright] @@ -215,6 +216,21 @@ omit = [ ] branch = true +[tool.bandit] +exclude_dirs = [ + "./build/*", + "./.github-venv/*", + "./.pytest_cache/*", + "./typings/*", + "./node_modules/*", + "./__pycache__/*", + "./.github/*", + "./.venv/*", + "./.git/*", + "./test/*/test*.py", + "./src/*/test/*/test*.py" +] + [tool.ruff] include = [ "pyproject.toml", From a95fca4cf266535b7d7ef0234e4c4a5dd3a5edd4 Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Tue, 26 Mar 2024 12:32:20 -0700 Subject: [PATCH 41/55] Fix name of SAST report CICD step. --- .github/workflows/CICD.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/CICD.yaml b/.github/workflows/CICD.yaml index 696b29b2..e853aec9 100644 --- a/.github/workflows/CICD.yaml +++ b/.github/workflows/CICD.yaml @@ -227,7 +227,7 @@ jobs: uses: actions/upload-artifact@v4 if: ${{ always() }} with: - name: pytest-and-coverage-report + name: bandit-sast-report path: | bandit.sarif retention-days: 1 From 51e3a75a3f35abbe1da812dd1c4052170265b277 Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Tue, 26 Mar 2024 13:16:33 -0700 Subject: [PATCH 42/55] Force CICD mode in Makefile during CICD. --- .github/workflows/CICD.yaml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/workflows/CICD.yaml b/.github/workflows/CICD.yaml index e853aec9..ea144bb5 100644 --- a/.github/workflows/CICD.yaml +++ b/.github/workflows/CICD.yaml @@ -140,6 +140,7 @@ jobs: VENV=.github-venv \ PYRIGHT_MODE=npm \ + DEFAULT_TARGET=cicd \ make test-pyright Pytest: @@ -174,6 +175,7 @@ jobs: VENV=.github-venv \ PYTEST_FLAGS="-k 'not acceptance'" \ + DEFAULT_TARGET=cicd \ make test-pytest - name: Output pytest report @@ -221,6 +223,7 @@ jobs: VENV=.github-venv \ PYTEST_FLAGS="-k 'not acceptance'" \ + DEFAULT_TARGET=cicd \ make test-bandit - name: Output bandit report @@ -262,11 +265,13 @@ jobs: - name: Check code style run: | VENV=.github-venv \ + DEFAULT_TARGET=cicd \ make test-ruff - name: Check import order run: | VENV=.github-venv \ + DEFAULT_TARGET=cicd \ make test-isort Final-status-check: From e8069d8b69245777d647bfcd4b27f36d11259b4d Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Tue, 26 Mar 2024 13:39:59 -0700 Subject: [PATCH 43/55] Add CodeQL upload to SAST scan. 
--- .github/workflows/CICD.yaml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/.github/workflows/CICD.yaml b/.github/workflows/CICD.yaml index ea144bb5..3d777153 100644 --- a/.github/workflows/CICD.yaml +++ b/.github/workflows/CICD.yaml @@ -226,7 +226,7 @@ jobs: DEFAULT_TARGET=cicd \ make test-bandit - - name: Output bandit report + - name: Output bandit report artifact uses: actions/upload-artifact@v4 if: ${{ always() }} with: @@ -236,6 +236,11 @@ jobs: retention-days: 1 if-no-files-found: error + - name: Upload bandit report to CodeQL + uses: github/codeql-action/upload-sarif@v3 + with: + sarif_file: bandit.sarif + Style: name: Style and formatting runs-on: ubuntu-latest From e5f70772bdac9b00541a984390675a4b5bb592ed Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Tue, 26 Mar 2024 15:50:27 -0700 Subject: [PATCH 44/55] Fix problem with sometimes out-of-order make target executions. --- Makefile | 117 ++++++++++++++++++++++++++++++------------------------- 1 file changed, 64 insertions(+), 53 deletions(-) diff --git a/Makefile b/Makefile index 3e293af3..7442d8d1 100644 --- a/Makefile +++ b/Makefile @@ -27,14 +27,10 @@ GITHUB_WORKSPACE ?= $(CURDIR) # `testpypi` and `pypi` are valid values. PYPI_REPO ?= testpypi -<<<<<<< HEAD # The directory to write ephermal reports to, # such as pytest coverage reports. REPORTS_DIR ?= reports -======= -BANDIT_REPORT := bandit.sarif ->>>>>>> 0f3e052 (Add SAST scanner Bandit.) # Can be overridden. This is used to change the prereqs # of some supporting targets, like `format-ruff`. @@ -43,12 +39,16 @@ BANDIT_REPORT := bandit.sarif DEFAULT_TARGET ?= dev .DEFAULT_GOAL = $(DEFAULT_TARGET) -define assign_default_target - DEFAULT_TARGET := $(1) -endef +#define assign_default_target +# DEFAULT_TARGET := $(1) +#endef ifeq ($(DEFAULT_TARGET),dev) + CONFIGURE_TARGET := _dev_configure + BUILD_TARGET := _dev_build else ifeq ($(DEFAULT_TARGET),cicd) + CONFIGURE_TARGET := _cicd_configure + BUILD_TARGET := _cicd_build else $(error DEFAULT_TARGET must be one of "dev" or "cicd") endif @@ -76,15 +76,35 @@ endef PYPROJECT_FILES=./pyproject.toml $(wildcard src/*/pyproject.toml) PACKAGE_PATHS=$(subst /pyproject.toml,,$(PYPROJECT_FILES)) -PACKAGES=BL_Python.all $(subst /pyproject.toml,,$(subst src/,BL_Python.,$(wildcard src/*/pyproject.toml))) +#PACKAGES=BL_Python.all $(subst /pyproject.toml,,$(subst src/,BL_Python.,$(wildcard src/*/pyproject.toml))) +PACKAGES=$(subst /pyproject.toml,,$(subst src/,BL_Python.,$(wildcard src/*/pyproject.toml))) .PHONY: dev # Rather than duplicating BL_Python.all, # just prereq it. -dev : dev_mode BL_Python.all +dev : $(VENV) $(SETUP_DEPENDENCIES) + $(MAKE) _dev_build DEFAULT_TARGET=dev +_dev_configure: $(VENV) $(PYPROJECT_FILES) #BL_Python.all +_dev_build : _dev_configure + @if [ -d $(call package_to_dist,all) ]; then + echo "Package $@ is already built, skipping..." + else + $(ACTIVATE_VENV) + + pip install -e .[dev-dependencies] +# By default, psycopg2 is not installed +# but it should be for development + pip install -e src/database[postgres-binary] + + rm -rf $(PACKAGE_INSTALL_DIR) + fi + + @$(REPORT_VENV_USAGE) -.PHONY: cicd -cicd : cicd_mode $(VENV) $(PYPROJECT_FILES) +cicd : $(VENV) $(SETUP_DEPENDENCIES) + $(MAKE) _cicd_build DEFAULT_TARGET=cicd +_cicd_configure: $(VENV) $(PYPROJECT_FILES) +_cicd_build : _cicd_configure @if [ -f $(call package_to_inst,) ]; then echo "Package is already built, skipping..." 
else @@ -98,32 +118,33 @@ cicd : cicd_mode $(VENV) $(PYPROJECT_FILES) @$(REPORT_VENV_USAGE) -MODES=dev_mode cicd_mode -# Used to force DEFAULT_TARGET to whatever -# the actual .DEFAULT_GOAL is. -$(MODES): - @echo $(call assign_default_target,$(subst _mode,,$@)) +#MODES=dev_mode cicd_mode +## Used to force DEFAULT_TARGET to whatever +## the actual .DEFAULT_GOAL is. +#$(MODES): +# @echo $(call assign_default_target,$(subst _mode,,$@)) # BL_Python.all does not have a src/%/pyproject.toml # prereq because its pyproject.toml is at / -BL_Python.all: $(VENV) $(PYPROJECT_FILES) - @if [ -d $(call package_to_dist,all) ]; then - echo "Package $@ is already built, skipping..." - else - $(ACTIVATE_VENV) - - pip install -e .[dev-dependencies] -# By default, psycopg2 is not installed -# but it should be for development - pip install -e src/database[postgres-binary] - - rm -rf $(PACKAGE_INSTALL_DIR) - fi - - @$(REPORT_VENV_USAGE) - -$(filter-out BL_Python.all, $(PACKAGES)): BL_Python.%: src/%/pyproject.toml $(VENV) +#BL_Python.all: $(VENV) $(PYPROJECT_FILES) +# @if [ -d $(call package_to_dist,all) ]; then +# echo "Package $@ is already built, skipping..." +# else +# $(ACTIVATE_VENV) +# +# pip install -e .[dev-dependencies] +## By default, psycopg2 is not installed +## but it should be for development +# pip install -e src/database[postgres-binary] +# +# rm -rf $(PACKAGE_INSTALL_DIR) +# fi +# +# @$(REPORT_VENV_USAGE) + +#$(filter-out BL_Python.all, $(PACKAGES)): BL_Python.%: src/%/pyproject.toml $(VENV) +$(PACKAGES): BL_Python.%: src/%/pyproject.toml $(VENV) $(CONFIGURE_TARGET) $(PYPROJECT_FILES) @if [ -d $(call package_to_dist,$*) ]; then @echo "Package $@ is already built, skipping..." else @@ -168,31 +189,30 @@ $(VENV) : pip install -U pip -format-isort : $(VENV) $(DEFAULT_TARGET) +format-isort : $(VENV) $(BUILD_TARGET) $(ACTIVATE_VENV) isort src -format-ruff : $(VENV) $(DEFAULT_TARGET) +format-ruff : $(VENV) $(BUILD_TARGET) $(ACTIVATE_VENV) ruff format --preview --respect-gitignore -.PHONY: format format-ruff format-isort -format : $(VENV) $(DEFAULT_TARGET) format-isort format-ruff +format : $(VENV) $(BUILD_TARGET) format-isort format-ruff -test-isort : $(VENV) $(DEFAULT_TARGET) +test-isort : $(VENV) $(BUILD_TARGET) $(ACTIVATE_VENV) isort --check-only src -test-ruff : $(VENV) $(DEFAULT_TARGET) +test-ruff : $(VENV) $(BUILD_TARGET) $(ACTIVATE_VENV) ruff format --preview --respect-gitignore --check -test-pyright : $(VENV) $(DEFAULT_TARGET) +test-pyright : $(VENV) $(BUILD_TARGET) $(ACTIVATE_VENV) ifeq "$(PYRIGHT_MODE)" "pip" @@ -208,7 +228,7 @@ test-pyright : $(VENV) $(DEFAULT_TARGET) endif endif -test-bandit : $(VENV) $(DEFAULT_TARGET) +test-bandit : $(VENV) $(BUILD_TARGET) $(ACTIVATE_VENV) bandit -c pyproject.toml \ @@ -217,9 +237,9 @@ test-bandit : $(VENV) $(DEFAULT_TARGET) -r . || BANDIT_EXIT_CODE=$$? # don't exit with an error # while testing bandit. 
- echo "Bandit exit code: $$BANDIT_EXIT_CODE" + @echo "Bandit exit code: $$BANDIT_EXIT_CODE" -test-pytest : $(VENV) $(DEFAULT_TARGET) +test-pytest : $(VENV) $(BUILD_TARGET) $(ACTIVATE_VENV) pytest $(PYTEST_FLAGS) \ @@ -232,7 +252,7 @@ test-pytest : $(VENV) $(DEFAULT_TARGET) exit $$PYTEST_EXIT_CODE .PHONY: test test-pytest test-pyright test-ruff test-isort -_test : $(VENV) $(DEFAULT_TARGET) test-isort test-ruff test-pyright test-bandit test-pytest +_test : $(VENV) $(BUILD_TARGET) test-isort test-ruff test-pyright test-bandit test-pytest test : CMD_PREFIX=@ test : clean-test $(MAKE) -j --keep-going _test @@ -262,17 +282,8 @@ clean-build : \) -prune -exec rm -rf {} \; clean-test : -<<<<<<< HEAD $(CMD_PREFIX)rm -rf \ $(REPORTS_DIR)/pytest -======= - $(CMD_PREFIX)rm -rf cov.xml \ - pytest.xml \ - coverage \ - .coverage \ - $(BANDIT_REPORT) - ->>>>>>> 0f3e052 (Add SAST scanner Bandit.) .PHONY: clean clean-test clean-build clean : clean-build clean-test From 24aec37f7a367a0b62fd949513702b6ef14b851c Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Tue, 26 Mar 2024 16:50:04 -0700 Subject: [PATCH 45/55] Fix problem caused by venv symlinks. --- .github/workflows/CICD.yaml | 1 - Makefile | 69 +++++++++++++++++-------------------- 2 files changed, 31 insertions(+), 39 deletions(-) diff --git a/.github/workflows/CICD.yaml b/.github/workflows/CICD.yaml index 3d777153..3bac26b9 100644 --- a/.github/workflows/CICD.yaml +++ b/.github/workflows/CICD.yaml @@ -222,7 +222,6 @@ jobs: echo Running bandit VENV=.github-venv \ - PYTEST_FLAGS="-k 'not acceptance'" \ DEFAULT_TARGET=cicd \ make test-bandit diff --git a/Makefile b/Makefile index 7442d8d1..cd7f6b05 100644 --- a/Makefile +++ b/Makefile @@ -39,9 +39,6 @@ REPORTS_DIR ?= reports DEFAULT_TARGET ?= dev .DEFAULT_GOAL = $(DEFAULT_TARGET) -#define assign_default_target -# DEFAULT_TARGET := $(1) -#endef ifeq ($(DEFAULT_TARGET),dev) CONFIGURE_TARGET := _dev_configure @@ -55,6 +52,7 @@ endif ACTIVATE_VENV := . $(VENV)/bin/activate +PREFIX_VENV := PATH=$(CURDIR)/$(VENV)/bin:$$PATH REPORT_VENV_USAGE := echo '\nActivate your venv with `. $(VENV)/bin/activate`' PACKAGE_INSTALL_DIR := $(VENV)/lib/python*/site-packages/BL_Python @@ -76,7 +74,6 @@ endef PYPROJECT_FILES=./pyproject.toml $(wildcard src/*/pyproject.toml) PACKAGE_PATHS=$(subst /pyproject.toml,,$(PYPROJECT_FILES)) -#PACKAGES=BL_Python.all $(subst /pyproject.toml,,$(subst src/,BL_Python.,$(wildcard src/*/pyproject.toml))) PACKAGES=$(subst /pyproject.toml,,$(subst src/,BL_Python.,$(wildcard src/*/pyproject.toml))) .PHONY: dev @@ -84,16 +81,18 @@ PACKAGES=$(subst /pyproject.toml,,$(subst src/,BL_Python.,$(wildcard src/*/pypro # just prereq it. dev : $(VENV) $(SETUP_DEPENDENCIES) $(MAKE) _dev_build DEFAULT_TARGET=dev -_dev_configure: $(VENV) $(PYPROJECT_FILES) #BL_Python.all +_dev_configure : $(VENV) $(PYPROJECT_FILES) _dev_build : _dev_configure @if [ -d $(call package_to_dist,all) ]; then echo "Package $@ is already built, skipping..." else $(ACTIVATE_VENV) + $(PREFIX_VENV) \ pip install -e .[dev-dependencies] # By default, psycopg2 is not installed # but it should be for development + $(PREFIX_VENV) \ pip install -e src/database[postgres-binary] rm -rf $(PACKAGE_INSTALL_DIR) @@ -103,56 +102,34 @@ _dev_build : _dev_configure cicd : $(VENV) $(SETUP_DEPENDENCIES) $(MAKE) _cicd_build DEFAULT_TARGET=cicd -_cicd_configure: $(VENV) $(PYPROJECT_FILES) +_cicd_configure : $(VENV) $(PYPROJECT_FILES) _cicd_build : _cicd_configure @if [ -f $(call package_to_inst,) ]; then echo "Package is already built, skipping..." 
else $(ACTIVATE_VENV) + $(PREFIX_VENV) \ pip install .[dev-dependencies] # By default, psycopg2 is not installed # but it should be for CI/CD + $(PREFIX_VENV) \ pip install src/database[postgres-binary] fi @$(REPORT_VENV_USAGE) -#MODES=dev_mode cicd_mode -## Used to force DEFAULT_TARGET to whatever -## the actual .DEFAULT_GOAL is. -#$(MODES): -# @echo $(call assign_default_target,$(subst _mode,,$@)) - - -# BL_Python.all does not have a src/%/pyproject.toml -# prereq because its pyproject.toml is at / -#BL_Python.all: $(VENV) $(PYPROJECT_FILES) -# @if [ -d $(call package_to_dist,all) ]; then -# echo "Package $@ is already built, skipping..." -# else -# $(ACTIVATE_VENV) -# -# pip install -e .[dev-dependencies] -## By default, psycopg2 is not installed -## but it should be for development -# pip install -e src/database[postgres-binary] -# -# rm -rf $(PACKAGE_INSTALL_DIR) -# fi -# -# @$(REPORT_VENV_USAGE) - -#$(filter-out BL_Python.all, $(PACKAGES)): BL_Python.%: src/%/pyproject.toml $(VENV) -$(PACKAGES): BL_Python.%: src/%/pyproject.toml $(VENV) $(CONFIGURE_TARGET) $(PYPROJECT_FILES) +$(PACKAGES) : BL_Python.%: src/%/pyproject.toml $(VENV) $(CONFIGURE_TARGET) $(PYPROJECT_FILES) @if [ -d $(call package_to_dist,$*) ]; then @echo "Package $@ is already built, skipping..." else $(ACTIVATE_VENV) if [ "$@" = "BL_Python.database" ]; then + $(PREFIX_VENV) \ pip install -e $(dir $<)[postgres-binary] else + $(PREFIX_VENV) \ pip install -e $(dir $<) fi @@ -165,16 +142,20 @@ $(PACKAGES): BL_Python.%: src/%/pyproject.toml $(VENV) $(CONFIGURE_TARGET) $(PYP SETUP_DEPENDENCIES=$(call dep_to_venv_path,toml/__init__.py) $(call dep_to_venv_path,typing_extensions.py) $(call dep_to_venv_path,toml/__init__.py): $(VENV) $(ACTIVATE_VENV) - pip install toml + + $(PREFIX_VENV) pip install toml $(call dep_to_venv_path,typing_extensions.py): $(VENV) $(ACTIVATE_VENV) + + $(PREFIX_VENV) \ pip install typing_extensions $(PACKAGE_PATHS) : $(VENV) $(SETUP_DEPENDENCIES) $(PYPROJECT_FILES) : $(VENV) $(SETUP_DEPENDENCIES) $(ACTIVATE_VENV) + $(PREFIX_VENV) \ REWRITE_DEPENDENCIES=$(REWRITE_DEPENDENCIES) \ GITHUB_REF=$(GITHUB_REF) \ GITHUB_WORKSPACE=$(GITHUB_WORKSPACE) \ @@ -184,19 +165,25 @@ $(PYPROJECT_FILES) : $(VENV) $(SETUP_DEPENDENCIES) $(VENV) : test -d $(VENV) || env python$(PYTHON_VERSION) -m venv $(VENV) +# fix Python symlink that is wrong on GitHub Actions for some reason + ln -sf $(which python$(PYTHON_VERSION)) $(VENV)/bin/python$(PYTHON_VERSION) + $(ACTIVATE_VENV) + $(PREFIX_VENV) \ pip install -U pip format-isort : $(VENV) $(BUILD_TARGET) $(ACTIVATE_VENV) - isort src + $(PREFIX_VENV) \ + isort src format-ruff : $(VENV) $(BUILD_TARGET) $(ACTIVATE_VENV) + $(PREFIX_VENV) \ ruff format --preview --respect-gitignore format : $(VENV) $(BUILD_TARGET) format-isort format-ruff @@ -205,17 +192,20 @@ format : $(VENV) $(BUILD_TARGET) format-isort format-ruff test-isort : $(VENV) $(BUILD_TARGET) $(ACTIVATE_VENV) - isort --check-only src + $(PREFIX_VENV) \ + isort --check-only src test-ruff : $(VENV) $(BUILD_TARGET) $(ACTIVATE_VENV) + $(PREFIX_VENV) \ ruff format --preview --respect-gitignore --check test-pyright : $(VENV) $(BUILD_TARGET) $(ACTIVATE_VENV) ifeq "$(PYRIGHT_MODE)" "pip" + $(PREFIX_VENV) \ pyright else ifeq "$(PYRIGHT_MODE)" "npm" @@ -231,12 +221,14 @@ test-pyright : $(VENV) $(BUILD_TARGET) test-bandit : $(VENV) $(BUILD_TARGET) $(ACTIVATE_VENV) + $(PREFIX_VENV) \ bandit -c pyproject.toml \ --format sarif \ --output $(BANDIT_REPORT) \ -r . || BANDIT_EXIT_CODE=$$? - # don't exit with an error - # while testing bandit. 
+ +# don't exit with an error +# while testing bandit. @echo "Bandit exit code: $$BANDIT_EXIT_CODE" test-pytest : $(VENV) $(BUILD_TARGET) @@ -264,6 +256,7 @@ publish-all : REWRITE_DEPENDENCIES=false publish-all : reset $(VENV) $(ACTIVATE_VENV) + $(PREFIX_VENV) \ ./publish_all.sh $(PYPI_REPO) From 2e7d27b1bf9e7ac2dab991ad0db0b4ac8c38fa4e Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Wed, 27 Mar 2024 11:33:41 -0700 Subject: [PATCH 46/55] Remove commented code. --- .github/workflows/CICD.yaml | 22 ---------------------- 1 file changed, 22 deletions(-) diff --git a/.github/workflows/CICD.yaml b/.github/workflows/CICD.yaml index 3bac26b9..9c5d3d32 100644 --- a/.github/workflows/CICD.yaml +++ b/.github/workflows/CICD.yaml @@ -61,17 +61,6 @@ jobs: echo "cache_key_dependencies=$cache_key_dependencies" >> $GITHUB_OUTPUT echo "cache_key_run=$cache_key_run" >> $GITHUB_OUTPUT - #- name: Restore dependency cache - # if: ${{ success() && (github.event_name != 'workflow_dispatch' || inputs.use_dependency_cache == 'true') }} - # TODO figure out a way to get 3rd party dependencies cached without caching BL_Python ... maybe. - # uses: actions/cache/restore@v4 - # id: restore-dependency-cache - # with: - # path: | - # .github-venv - # node_modules - # key: ${{ steps.generate-cache-keys.outputs.cache_key_dependencies }} - - name: Set up Python ${{ env.PYTHON_VERSION }} #if: ${{ success() && (inputs.use_dependency_cache == 'false' || !steps.restore-dependency-cache.outputs.cache-hit) }} uses: actions/setup-python@v5 @@ -90,17 +79,6 @@ jobs: echo 'prefix=${{ github.workspace }}/node_modules' >> ~/.npmrc npm install -g pyright@`pyright --version | awk '{print $2}'` - # TODO see note above about 3rd party dependencies. - #- name: Save dependency cache - # if: ${{ success() && !steps.restore-dependency-cache.outputs.cache-hit }} - # uses: actions/cache/save@v4 - # id: save-dependency-cache - # with: - # path: | - # .github-venv - # node_modules - # key: ${{ steps.generate-cache-keys.outputs.cache_key_dependencies }} - - name: Save run cache uses: actions/cache/save@v4 id: save-run-cache From 6179357aec1ab9d43fae328da62fff223744ecee Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Wed, 27 Mar 2024 12:57:56 -0700 Subject: [PATCH 47/55] Test whether using a consistent and specific Python version fixes venv issues during CICD. 
--- .github/workflows/CICD.yaml | 22 ++++++++++++---------- Makefile | 19 +------------------ 2 files changed, 13 insertions(+), 28 deletions(-) diff --git a/.github/workflows/CICD.yaml b/.github/workflows/CICD.yaml index 9c5d3d32..ee4c2688 100644 --- a/.github/workflows/CICD.yaml +++ b/.github/workflows/CICD.yaml @@ -41,6 +41,7 @@ jobs: outputs: cache-key-dependencies: ${{ steps.generate-cache-keys.outputs.cache_key }} cache-key-run: ${{ steps.generate-cache-keys.outputs.cache_key_run }} + python-version: ${{ steps.install-python.outputs.python-version }} env: PYTHON_VERSION: "3.10" @@ -61,12 +62,13 @@ jobs: echo "cache_key_dependencies=$cache_key_dependencies" >> $GITHUB_OUTPUT echo "cache_key_run=$cache_key_run" >> $GITHUB_OUTPUT - - name: Set up Python ${{ env.PYTHON_VERSION }} + - name: Set up Python ${{ steps.generate-cache-keys.outputs.python-version }} #if: ${{ success() && (inputs.use_dependency_cache == 'false' || !steps.restore-dependency-cache.outputs.cache-hit) }} uses: actions/setup-python@v5 - id: install_python + id: install-python with: python-version: ${{ env.PYTHON_VERSION }} + #run: echo "python-version=${{ steps.install-python.outputs.python-version }}" >> "$GITHUB_OUTPUT" - name: Install dependencies #if: ${{ success() && (inputs.use_dependency_cache == 'false' || !steps.restore-dependency-cache.outputs.cache-hit) }} @@ -93,14 +95,14 @@ jobs: - Checkout env: - PYTHON_VERSION: "3.10" + PYTHON_VERSION: ${{ needs.Checkout.outputs.python-version }} if: ${{( success() && !cancelled() ) }} steps: - name: Set up Python ${{ env.PYTHON_VERSION }} uses: actions/setup-python@v5 - id: install_python + id: install-python with: python-version: ${{ env.PYTHON_VERSION }} @@ -128,14 +130,14 @@ jobs: - Checkout env: - PYTHON_VERSION: "3.10" + PYTHON_VERSION: ${{ needs.Checkout.outputs.python-version }} if: ${{( success() && !cancelled() ) }} steps: - name: Set up Python ${{ env.PYTHON_VERSION }} uses: actions/setup-python@v5 - id: install_python + id: install-python with: python-version: ${{ env.PYTHON_VERSION }} @@ -173,7 +175,7 @@ jobs: - Checkout env: - PYTHON_VERSION: "3.10" + PYTHON_VERSION: ${{ needs.Checkout.outputs.python-version }} if: ${{( success() && !cancelled() ) }} @@ -183,7 +185,7 @@ jobs: steps: - name: Set up Python ${{ env.PYTHON_VERSION }} uses: actions/setup-python@v5 - id: install_python + id: install-python with: python-version: ${{ env.PYTHON_VERSION }} @@ -225,14 +227,14 @@ jobs: - Checkout env: - PYTHON_VERSION: "3.10" + PYTHON_VERSION: ${{ needs.Checkout.outputs.python-version }} if: ${{( success() && !cancelled() ) }} steps: - name: Set up Python ${{ env.PYTHON_VERSION }} uses: actions/setup-python@v5 - id: install_python + id: install-python with: python-version: ${{ env.PYTHON_VERSION }} diff --git a/Makefile b/Makefile index cd7f6b05..6d839e93 100644 --- a/Makefile +++ b/Makefile @@ -52,7 +52,6 @@ endif ACTIVATE_VENV := . $(VENV)/bin/activate -PREFIX_VENV := PATH=$(CURDIR)/$(VENV)/bin:$$PATH REPORT_VENV_USAGE := echo '\nActivate your venv with `. 
$(VENV)/bin/activate`' PACKAGE_INSTALL_DIR := $(VENV)/lib/python*/site-packages/BL_Python @@ -88,11 +87,9 @@ _dev_build : _dev_configure else $(ACTIVATE_VENV) - $(PREFIX_VENV) \ pip install -e .[dev-dependencies] # By default, psycopg2 is not installed # but it should be for development - $(PREFIX_VENV) \ pip install -e src/database[postgres-binary] rm -rf $(PACKAGE_INSTALL_DIR) @@ -109,11 +106,9 @@ _cicd_build : _cicd_configure else $(ACTIVATE_VENV) - $(PREFIX_VENV) \ pip install .[dev-dependencies] # By default, psycopg2 is not installed # but it should be for CI/CD - $(PREFIX_VENV) \ pip install src/database[postgres-binary] fi @@ -126,10 +121,8 @@ $(PACKAGES) : BL_Python.%: src/%/pyproject.toml $(VENV) $(CONFIGURE_TARGET) $(PY $(ACTIVATE_VENV) if [ "$@" = "BL_Python.database" ]; then - $(PREFIX_VENV) \ pip install -e $(dir $<)[postgres-binary] else - $(PREFIX_VENV) \ pip install -e $(dir $<) fi @@ -143,19 +136,17 @@ SETUP_DEPENDENCIES=$(call dep_to_venv_path,toml/__init__.py) $(call dep_to_venv_ $(call dep_to_venv_path,toml/__init__.py): $(VENV) $(ACTIVATE_VENV) - $(PREFIX_VENV) pip install toml + pip install toml $(call dep_to_venv_path,typing_extensions.py): $(VENV) $(ACTIVATE_VENV) - $(PREFIX_VENV) \ pip install typing_extensions $(PACKAGE_PATHS) : $(VENV) $(SETUP_DEPENDENCIES) $(PYPROJECT_FILES) : $(VENV) $(SETUP_DEPENDENCIES) $(ACTIVATE_VENV) - $(PREFIX_VENV) \ REWRITE_DEPENDENCIES=$(REWRITE_DEPENDENCIES) \ GITHUB_REF=$(GITHUB_REF) \ GITHUB_WORKSPACE=$(GITHUB_WORKSPACE) \ @@ -170,20 +161,17 @@ $(VENV) : $(ACTIVATE_VENV) - $(PREFIX_VENV) \ pip install -U pip format-isort : $(VENV) $(BUILD_TARGET) $(ACTIVATE_VENV) - $(PREFIX_VENV) \ isort src format-ruff : $(VENV) $(BUILD_TARGET) $(ACTIVATE_VENV) - $(PREFIX_VENV) \ ruff format --preview --respect-gitignore format : $(VENV) $(BUILD_TARGET) format-isort format-ruff @@ -192,20 +180,17 @@ format : $(VENV) $(BUILD_TARGET) format-isort format-ruff test-isort : $(VENV) $(BUILD_TARGET) $(ACTIVATE_VENV) - $(PREFIX_VENV) \ isort --check-only src test-ruff : $(VENV) $(BUILD_TARGET) $(ACTIVATE_VENV) - $(PREFIX_VENV) \ ruff format --preview --respect-gitignore --check test-pyright : $(VENV) $(BUILD_TARGET) $(ACTIVATE_VENV) ifeq "$(PYRIGHT_MODE)" "pip" - $(PREFIX_VENV) \ pyright else ifeq "$(PYRIGHT_MODE)" "npm" @@ -221,7 +206,6 @@ test-pyright : $(VENV) $(BUILD_TARGET) test-bandit : $(VENV) $(BUILD_TARGET) $(ACTIVATE_VENV) - $(PREFIX_VENV) \ bandit -c pyproject.toml \ --format sarif \ --output $(BANDIT_REPORT) \ @@ -256,7 +240,6 @@ publish-all : REWRITE_DEPENDENCIES=false publish-all : reset $(VENV) $(ACTIVATE_VENV) - $(PREFIX_VENV) \ ./publish_all.sh $(PYPI_REPO) From 3c0a7b863f3748666f7a69f922f0db6fd1daa9eb Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Wed, 27 Mar 2024 13:13:08 -0700 Subject: [PATCH 48/55] Remove alteration of Python symlink in venv. 
--- Makefile | 3 --- 1 file changed, 3 deletions(-) diff --git a/Makefile b/Makefile index 6d839e93..b01a80a6 100644 --- a/Makefile +++ b/Makefile @@ -156,9 +156,6 @@ $(PYPROJECT_FILES) : $(VENV) $(SETUP_DEPENDENCIES) $(VENV) : test -d $(VENV) || env python$(PYTHON_VERSION) -m venv $(VENV) -# fix Python symlink that is wrong on GitHub Actions for some reason - ln -sf $(which python$(PYTHON_VERSION)) $(VENV)/bin/python$(PYTHON_VERSION) - $(ACTIVATE_VENV) pip install -U pip From 126b5930231c42c75e545d9161eeb4f2623210b1 Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Wed, 27 Mar 2024 13:14:33 -0700 Subject: [PATCH 49/55] Update .github/workflows/CICD.yaml Co-authored-by: Nick Wiltsie --- .github/workflows/CICD.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/CICD.yaml b/.github/workflows/CICD.yaml index ee4c2688..e6acd81b 100644 --- a/.github/workflows/CICD.yaml +++ b/.github/workflows/CICD.yaml @@ -68,7 +68,6 @@ jobs: id: install-python with: python-version: ${{ env.PYTHON_VERSION }} - #run: echo "python-version=${{ steps.install-python.outputs.python-version }}" >> "$GITHUB_OUTPUT" - name: Install dependencies #if: ${{ success() && (inputs.use_dependency_cache == 'false' || !steps.restore-dependency-cache.outputs.cache-hit) }} From 47158f94252bd4c2ef3b6d82095dbffb8c887b68 Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Wed, 27 Mar 2024 13:46:40 -0700 Subject: [PATCH 50/55] Show warning in PR if Bandit fails. --- .github/workflows/CICD.yaml | 8 +++++++- Makefile | 10 ++++------ 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/.github/workflows/CICD.yaml b/.github/workflows/CICD.yaml index e6acd81b..0abbd3b6 100644 --- a/.github/workflows/CICD.yaml +++ b/.github/workflows/CICD.yaml @@ -202,7 +202,13 @@ jobs: VENV=.github-venv \ DEFAULT_TARGET=cicd \ - make test-bandit + make test-bandit || BANDIT_EXIT_CODE=$? + + echo "Bandit exit code: $BANDIT_EXIT_CODE" + + if [ $BANDIT_EXIT_CODE -ne 0 ]; then + echo "::warning title=Bandit::Bandit exit code: $BANDIT_EXIT_CODE" + fi - name: Output bandit report artifact uses: actions/upload-artifact@v4 diff --git a/Makefile b/Makefile index b01a80a6..e211add6 100644 --- a/Makefile +++ b/Makefile @@ -203,14 +203,12 @@ test-pyright : $(VENV) $(BUILD_TARGET) test-bandit : $(VENV) $(BUILD_TARGET) $(ACTIVATE_VENV) - bandit -c pyproject.toml \ - --format sarif \ - --output $(BANDIT_REPORT) \ - -r . || BANDIT_EXIT_CODE=$$? - # don't exit with an error # while testing bandit. - @echo "Bandit exit code: $$BANDIT_EXIT_CODE" + -bandit -c pyproject.toml \ + --format sarif \ + --output $(BANDIT_REPORT) \ + -r . test-pytest : $(VENV) $(BUILD_TARGET) $(ACTIVATE_VENV) From 50e6e485ac757451d7def5e4e171c597c51add70 Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Mon, 18 Mar 2024 12:46:15 -0700 Subject: [PATCH 51/55] Change where pytest reports end up. Also: - improve Makefile a bit w/ explicit PHONY targets - parallelize test jobs - continue all test jobs if any fail --- Makefile | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/Makefile b/Makefile index e211add6..a9be7905 100644 --- a/Makefile +++ b/Makefile @@ -30,6 +30,8 @@ PYPI_REPO ?= testpypi # The directory to write ephermal reports to, # such as pytest coverage reports. REPORTS_DIR ?= reports +BANDIT_REPORT := bandit.sarif +PYTEST_REPORT := pytest # Can be overridden. 
This is used to change the prereqs @@ -76,8 +78,6 @@ PACKAGE_PATHS=$(subst /pyproject.toml,,$(PYPROJECT_FILES)) PACKAGES=$(subst /pyproject.toml,,$(subst src/,BL_Python.,$(wildcard src/*/pyproject.toml))) .PHONY: dev -# Rather than duplicating BL_Python.all, -# just prereq it. dev : $(VENV) $(SETUP_DEPENDENCIES) $(MAKE) _dev_build DEFAULT_TARGET=dev _dev_configure : $(VENV) $(PYPROJECT_FILES) @@ -171,6 +171,7 @@ format-ruff : $(VENV) $(BUILD_TARGET) ruff format --preview --respect-gitignore +.PHONY: format format-ruff format-isort format : $(VENV) $(BUILD_TARGET) format-isort format-ruff @@ -217,12 +218,12 @@ test-pytest : $(VENV) $(BUILD_TARGET) && PYTEST_EXIT_CODE=0 \ || PYTEST_EXIT_CODE=$$? - -coverage html --data-file=$(REPORTS_DIR)/pytest/.coverage - -junit2html $(REPORTS_DIR)/pytest/pytest.xml $(REPORTS_DIR)/pytest/pytest.html + -coverage html --data-file=$(REPORTS_DIR)/$(PYTEST_REPORT)/.coverage + -junit2html $(REPORTS_DIR)/$(PYTEST_REPORT)/pytest.xml $(REPORTS_DIR)/$(PYTEST_REPORT)/pytest.html exit $$PYTEST_EXIT_CODE -.PHONY: test test-pytest test-pyright test-ruff test-isort +.PHONY: test test-pytest test-bandit test-pyright test-ruff test-isort _test : $(VENV) $(BUILD_TARGET) test-isort test-ruff test-pyright test-bandit test-pytest test : CMD_PREFIX=@ test : clean-test @@ -230,8 +231,8 @@ test : clean-test .PHONY: publish-all -# Publishing should use a real install, which `cicd` fulfills publish-all : REWRITE_DEPENDENCIES=false +# Publishing should use a real install. Reset the build env. publish-all : reset $(VENV) $(ACTIVATE_VENV) @@ -254,7 +255,8 @@ clean-build : clean-test : $(CMD_PREFIX)rm -rf \ - $(REPORTS_DIR)/pytest + $(REPORTS_DIR)/$(PYTEST_REPORT) \ + $(REPORTS_DIR)/$(BANDIT_REPORT) .PHONY: clean clean-test clean-build clean : clean-build clean-test From e0760c3b256892fd2e1c8a4f0b556641bd65cd52 Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Mon, 18 Mar 2024 12:52:46 -0700 Subject: [PATCH 52/55] Fix misplaced comments. --- Makefile | 1 + 1 file changed, 1 insertion(+) diff --git a/Makefile b/Makefile index a9be7905..9563a9e5 100644 --- a/Makefile +++ b/Makefile @@ -231,6 +231,7 @@ test : clean-test .PHONY: publish-all +# Publishing should use a real install, which `cicd` fulfills publish-all : REWRITE_DEPENDENCIES=false # Publishing should use a real install. Reset the build env. publish-all : reset $(VENV) From e79ec51a7cb3e1d35ffb0a6e22dbaa89aaa1c763 Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Thu, 28 Mar 2024 12:25:21 -0700 Subject: [PATCH 53/55] Update Bandit report path to store under reports/ --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 9563a9e5..28db2ab3 100644 --- a/Makefile +++ b/Makefile @@ -208,7 +208,7 @@ test-bandit : $(VENV) $(BUILD_TARGET) # while testing bandit. -bandit -c pyproject.toml \ --format sarif \ - --output $(BANDIT_REPORT) \ + --output $(REPORTS_DIR)/$(BANDIT_REPORT) \ -r . test-pytest : $(VENV) $(BUILD_TARGET) From 822a913ee0a73b3fd05b24d9ad4d74136bf36990 Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Thu, 28 Mar 2024 13:09:49 -0700 Subject: [PATCH 54/55] Correct SARIF path in CICD. 
--- .github/workflows/CICD.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/CICD.yaml b/.github/workflows/CICD.yaml index 0abbd3b6..b7eadfee 100644 --- a/.github/workflows/CICD.yaml +++ b/.github/workflows/CICD.yaml @@ -216,14 +216,14 @@ jobs: with: name: bandit-sast-report path: | - bandit.sarif + reports/bandit.sarif retention-days: 1 if-no-files-found: error - name: Upload bandit report to CodeQL uses: github/codeql-action/upload-sarif@v3 with: - sarif_file: bandit.sarif + sarif_file: reports/bandit.sarif Style: name: Style and formatting From 522bb74cb155e1885dcc57d8714906d9249ea33e Mon Sep 17 00:00:00 2001 From: Aaron Holmes Date: Thu, 28 Mar 2024 13:31:38 -0700 Subject: [PATCH 55/55] Add `BL_Python.all` package back to Makefile. --- Makefile | 1 + .../database/migrations/alembic/__main__.py | 9 +---- .../database/migrations/alembic/bl_alembic.py | 26 +++++++------- .../migrations/alembic/test_bl_alembic.py | 36 +++++++++++++++++-- 4 files changed, 49 insertions(+), 23 deletions(-) diff --git a/Makefile b/Makefile index 28db2ab3..bb092064 100644 --- a/Makefile +++ b/Makefile @@ -114,6 +114,7 @@ _cicd_build : _cicd_configure @$(REPORT_VENV_USAGE) +BL_Python.all: $(DEFAULT_TARGET) $(PACKAGES) : BL_Python.%: src/%/pyproject.toml $(VENV) $(CONFIGURE_TARGET) $(PYPROJECT_FILES) @if [ -d $(call package_to_dist,$*) ]; then @echo "Package $@ is already built, skipping..." diff --git a/src/database/BL_Python/database/migrations/alembic/__main__.py b/src/database/BL_Python/database/migrations/alembic/__main__.py index 61b0a807..68fe9dcd 100644 --- a/src/database/BL_Python/database/migrations/alembic/__main__.py +++ b/src/database/BL_Python/database/migrations/alembic/__main__.py @@ -24,14 +24,7 @@ def bl_alembic( logger = logging.getLogger() logger.setLevel(log_level) - if allow_overwrite is None: - _allow_overwrite = environ.get(BLAlembic.ALLOW_OVERWRITE_NAME) - allow_overwrite = (_allow_overwrite.lower() if _allow_overwrite else None) in [ - "true", - "1", - ] - - BLAlembic(argv, logger, allow_overwrite).run() + BLAlembic(argv, logger).run() if __name__ == "__main__": diff --git a/src/database/BL_Python/database/migrations/alembic/bl_alembic.py b/src/database/BL_Python/database/migrations/alembic/bl_alembic.py index d5998f8e..695e38bc 100644 --- a/src/database/BL_Python/database/migrations/alembic/bl_alembic.py +++ b/src/database/BL_Python/database/migrations/alembic/bl_alembic.py @@ -17,20 +17,16 @@ class BLAlembic: DEFAULT_CONFIG_NAME: str = "alembic.ini" LOG_LEVEL_NAME: str = "LOG_LEVEL" - ALLOW_OVERWRITE_NAME: str = "ALLOW_OVERWRITE" _run: Callable[[], None] _log: Logger - _allow_overwrite: bool = False @dataclass class FileCopy: source: Path destination: Path - def __init__( - self, argv: list[str] | None, logger: Logger, allow_overwrite: bool = False - ) -> None: + def __init__(self, argv: list[str] | None, logger: Logger) -> None: """ _summary_ @@ -39,7 +35,6 @@ def __init__( :param Logger logger: A logger for writing messages. """ self._log = logger - self._allow_overwrite = allow_overwrite if not argv: argv = sys.argv[1:] @@ -206,7 +201,14 @@ def _write_bl_alembic_config( :yield Generator[tempfile._TemporaryFileWrapper[bytes], Any, None]: The temp file. """ - # need to _not_ use a temp file, and copy the default alembic.ini + config_file_destination = Path(Path.cwd(), BLAlembic.DEFAULT_CONFIG_NAME) + if config_file_destination.exists(): + self._log.debug( + f"Configuration file '{BLAlembic.DEFAULT_CONFIG_NAME}' exists. 
Will not attempt to create it." + ) + return + + # copy the default alembic.ini # to the directory in which bl-alembic is executed. self._log.debug( f"Writing configuration file '{BLAlembic.DEFAULT_CONFIG_NAME}'." @@ -214,13 +216,13 @@ def _write_bl_alembic_config( self._copy_files([ BLAlembic.FileCopy( Path(Path(__file__).resolve().parent, BLAlembic.DEFAULT_CONFIG_NAME), - Path(Path.cwd(), BLAlembic.DEFAULT_CONFIG_NAME), + config_file_destination, ) ]) def _copy_files(self, files: list[FileCopy], force_overwrite: bool = False): for file in files: - write_mode = "w+b" if self._allow_overwrite or force_overwrite else "x+b" + write_mode = "w+b" if force_overwrite else "x+b" try: with ( open(file.source, "r") as source, @@ -231,10 +233,8 @@ def _copy_files(self, files: list[FileCopy], force_overwrite: bool = False): if e.filename != str(file.destination): raise - self._log.warn( - f"""The file '{file.destination}' already exists, but this is unexpected. Refusing to overwrite. - To use the default configuration, delete the existing file, - or set the envvar `{BLAlembic.ALLOW_OVERWRITE_NAME}=True`.""" + self._log.debug( + f"The file '{file.destination}' already exists. Refusing to overwrite, but ignoring exception." ) def _alembic_msg_capture( diff --git a/src/database/test/unit/migrations/alembic/test_bl_alembic.py b/src/database/test/unit/migrations/alembic/test_bl_alembic.py index 0bcef6de..7f2a7114 100644 --- a/src/database/test/unit/migrations/alembic/test_bl_alembic.py +++ b/src/database/test/unit/migrations/alembic/test_bl_alembic.py @@ -89,7 +89,7 @@ def path_se(*args: Any, **kwargs: Any): # set the call args for the Path mocks that are passed # into the FileCopy mock so we can examine them when FileCopy # is called - return MagicMock(args=args) + return MagicMock(args=args, exists=MagicMock(return_value=False)) def file_copy_se(*args: Any, **kwargs: Any): # set a mocked FileCopy whose src/dest are strings (filenames) @@ -119,7 +119,39 @@ def test__BLAlembic__does_not_overwrite_existing_config( _ = mock_alembic(mocker) _ = mock_argv(["upgrade", "head"]) - _ = mocker.patch("BL_Python.database.migrations.alembic.bl_alembic.Path") + _ = mocker.patch( + "BL_Python.database.migrations.alembic.bl_alembic.Path", + return_value=MagicMock(exists=MagicMock(return_value=True)), + ) + _ = mocker.patch("builtins.open", mocker.mock_open()) + log_mock = mocker.patch("BL_Python.database.migrations.alembic.bl_alembic.Logger") + + bl_alembic = BLAlembic(None, log_mock) + + try: + bl_alembic.run() + except: + pass + + assert [ + True + for call in log_mock.mock_calls + if call.args[0].startswith( + f"Configuration file '{BLAlembic.DEFAULT_CONFIG_NAME}' exists." + ) + ] + + +def test__BLAlembic__crashes_when_overwriting_unexpected_file( + mock_argv: MockArgv, mocker: MockerFixture +): + _ = mock_alembic(mocker) + _ = mock_argv(["upgrade", "head"]) + + _ = mocker.patch( + "BL_Python.database.migrations.alembic.bl_alembic.Path", + return_value=MagicMock(exists=MagicMock(return_value=False)), + ) open_mock = mocker.patch("builtins.open", mocker.mock_open()) def raise_file_exists_error(*args: Any, **kwargs: Any):