Dev (#407)
redjax authored Feb 19, 2024
2 parents 435149c + af26dda · commit 073b2af
Showing 39 changed files with 541 additions and 227 deletions.
1 change: 1 addition & 0 deletions .github/workflows/export-requirements.yml
@@ -23,6 +23,7 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.ref }}
token: ${{ github.token }}

- name: Set up Python 3.11
uses: actions/setup-python@v5
1 change: 1 addition & 0 deletions .github/workflows/lint-format.yml
@@ -23,6 +23,7 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.ref }}
token: ${{ github.token }}

- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
1 change: 1 addition & 0 deletions .github/workflows/tests.yml
@@ -25,6 +25,7 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.ref }}
token: ${{ github.token }}

- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
42 changes: 36 additions & 6 deletions noxfile.py
@@ -10,18 +10,26 @@
nox.options.error_on_external_run = False
nox.options.error_on_missing_interpreters = False
# nox.options.report = True

## Define sessions to run when no session is specified
nox.sessions = ["lint", "export", "tests"]

## Get tuple of Python ver ('maj', 'min', 'mic')
PY_VER_TUPLE = platform.python_version_tuple()
## Dynamically set Python version
DEFAULT_PYTHON: str = f"{PY_VER_TUPLE[0]}.{PY_VER_TUPLE[1]}"
# INIT_COPY_FILES: list[dict[str, str]] = [
# {"src": "config/.secrets.example.toml", "dest": "config/.secrets.toml"},
# {"src": "config/settings.toml", "dest": "config/settings.local.toml"},
# ]
## Define versions to test
PY_VERSIONS: list[str] = ["3.12", "3.11"]
## Set PDM version to install throughout
PDM_VER: str = "2.11.2"
PDM_VER: str = "2.12.3"
## Set paths to lint with the lint session
LINT_PATHS: list[str] = ["src", "tests", "./noxfile.py"]

## Get tuple of Python ver ('maj', 'min', 'mic')
PY_VER_TUPLE = platform.python_version_tuple()
## Dynamically set Python version
DEFAULT_PYTHON: str = f"{PY_VER_TUPLE[0]}.{PY_VER_TUPLE[1]}"

## Set directory for requirements.txt file output
REQUIREMENTS_OUTPUT_DIR: Path = Path("./requirements")
## Ensure REQUIREMENTS_OUTPUT_DIR path exists
@@ -93,7 +101,7 @@ def export_requirements(session: nox.Session, pdm_ver: str):
"pdm",
"export",
"--prod",
"--no-default",
# "--no-default",
"-o",
f"{REQUIREMENTS_OUTPUT_DIR}/requirements.txt",
"--without-hashes",
@@ -180,3 +188,25 @@ def build_docs(session: nox.Session, pdm_ver: str):

print("Building docs with mkdocs")
session.run("pdm", "run", "mkdocs", "build")


# @nox.session(python=[PY_VER_TUPLE], name="init-setup")
# def run_initial_setup(session: nox.Session):
# if INIT_COPY_FILES is None:
# print(f"INIT_COPY_FILES is empty. Skipping")
# pass

# else:

# for pair_dict in INIT_COPY_FILES:
# src = Path(pair_dict["src"])
# dest = Path(pair_dict["dest"])
# if not dest.exists():
# print(f"Copying {src} to {dest}")
# try:
# shutil.copy(src, dest)
# except Exception as exc:
# msg = Exception(
# f"Unhandled exception copying file from '{src}' to '{dest}'. Details: {exc}"
# )
# print(f"[ERROR] {msg}")
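For reference, below is a rough, runnable sketch of what this commented-out init-setup session could look like if it were enabled. It reuses the commented INIT_COPY_FILES pairs and the PY_VER_TUPLE/DEFAULT_PYTHON lines from earlier in noxfile.py, adds the shutil import the copy calls need, and swaps the commented python=[PY_VER_TUPLE] argument for python=[DEFAULT_PYTHON], since nox's python argument takes version strings like "3.11" rather than the tuple platform.python_version_tuple() returns. This sketch is only an illustration, not part of the commit:

import platform
import shutil
from pathlib import Path

import nox

## Taken from earlier in noxfile.py: dynamically pick the running interpreter's version
PY_VER_TUPLE = platform.python_version_tuple()
DEFAULT_PYTHON: str = f"{PY_VER_TUPLE[0]}.{PY_VER_TUPLE[1]}"

## Src/dest pairs taken from the commented-out INIT_COPY_FILES list above
INIT_COPY_FILES: list[dict[str, str]] = [
    {"src": "config/.secrets.example.toml", "dest": "config/.secrets.toml"},
    {"src": "config/settings.toml", "dest": "config/settings.local.toml"},
]


@nox.session(python=[DEFAULT_PYTHON], name="init-setup")
def run_initial_setup(session: nox.Session):
    ## Nothing to do when no copy pairs are defined
    if not INIT_COPY_FILES:
        print("INIT_COPY_FILES is empty. Skipping")
        return

    for pair_dict in INIT_COPY_FILES:
        src = Path(pair_dict["src"])
        dest = Path(pair_dict["dest"])

        ## Only copy when the destination file does not already exist
        if not dest.exists():
            print(f"Copying {src} to {dest}")
            try:
                shutil.copy(src, dest)
            except Exception as exc:
                print(f"[ERROR] Unhandled exception copying '{src}' to '{dest}'. Details: {exc}")

If the session were enabled, it would be run with nox -s init-setup.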
247 changes: 56 additions & 191 deletions pdm.lock

Large diffs are not rendered by default.

10 changes: 7 additions & 3 deletions pyproject.toml
@@ -36,16 +36,18 @@ standard = [
]
all = [
"diskcache>=5.6.3",
"fastapi>=0.109.0",
"uvicorn[standard]>=0.25.0",
"fastapi>=0.109.2",
"uvicorn>=0.27.1",
"loguru>=0.7.2",
"httpx>=0.26.0",
"msgpack>=1.0.7",
"pendulum>=3.0.0",
"sqlalchemy>=2.0.25",
"arrow>=1.3.0",
"pandas>=2.1.4",
"fastparquet>=2023.10.1",
"fastparquet>=2024.2.0",
"pyarrow>=15.0.0",
"ipykernel>=6.29.2",
]
fastapi = [
"fastapi>=0.109.0",
@@ -75,12 +77,14 @@ data = [
dataframes = [
"pandas>=2.1.4",
"fastparquet>=2023.10.1",
"pyarrow>=15.0.0",
]
ci = [
"tox>=4.12.0",
"ruff>=0.1.13",
"black>=23.12.1",
"pytest>=7.4.4",
"nox>=2023.4.22",
]
docs = [
"pygments>=2.17.2",
21 changes: 21 additions & 0 deletions requirements/requirements.txt
@@ -1,3 +1,24 @@
# This file is @generated by PDM.
# Please do not edit it manually.

anyio==4.2.0
certifi==2023.11.17
colorama==0.4.6; sys_platform == "win32"
diskcache==5.6.3
h11==0.14.0
httpcore==1.0.2
httpx==0.26.0
idna==3.6
loguru==0.7.2
markdown-it-py==3.0.0
mdurl==0.1.2
msgpack==1.0.7
pendulum==3.0.0
pygments==2.17.2
python-dateutil==2.8.2
rich==13.7.0
six==1.16.0
sniffio==1.3.0
time-machine==2.13.0; implementation_name != "pypy"
tzdata==2023.4
win32-setctime==1.1.0; sys_platform == "win32"
2 changes: 2 additions & 0 deletions ruff.ci.toml
@@ -36,6 +36,8 @@ ignore = [
"D401", ## First line of docstring should be in imperative mood
"E402", ## Module level import not at top of file
"D404", ## First word of the docstring should not be "This"
"D406", ## Section name should end with a newline
"D407", ## Missing dashed underline after section
"D414", ## Section has no content
"D415", ## First line should end with a period, question mark, or exclamation point
"D417", ## Missing argument descriptions in the docstring for [variables]
58 changes: 33 additions & 25 deletions ruff.toml
@@ -7,9 +7,9 @@ line-length = 88
## Enable pycodestyle ("E") and Pyflakes ("F") codes by default
# # Docs: https://beta.ruff.rs/docs/rules/
select = [
"D", ## pydocstyle
"E", ## pycodestyle
"I", ## isort
"D", ## pydocstyle
"E", ## pycodestyle
"I", ## isort
"I001", ## Unused imports
]

@@ -20,16 +20,18 @@ select = [
# ignoring that check. When the line is
# uncommented, the check will be run.
ignore = [
"D100", ## missing-docstring-in-public-module
"D101", ## missing-docstring-in-public-class
"D102", ## missing-docstring-in-public-method
"D103", ## Missing docstring in public function
"D106", ## missing-docstring-in-public-nested-class
"D203", ## one-blank-line-before-class
"D213", ## multi-line-summary-second-line
"E501", ## Line too long
"E402", ## Module level import not at top of file
"E722" ## Do note use bare `except`
"D100", ## missing-docstring-in-public-module
"D101", ## missing-docstring-in-public-class
"D102", ## missing-docstring-in-public-method
"D103", ## Missing docstring in public function
"D106", ## missing-docstring-in-public-nested-class
"D203", ## one-blank-line-before-class
"D213", ## multi-line-summary-second-line
"E501", ## Line too long
"E402", ## Module level import not at top of file
"D406", ## Section name should end with a newline
"D407", ## Missing dashed underline after section
"E722", ## Do note use bare `except`
]

## Allow autofix for all enabled rules (when "--fix") is provided.
@@ -39,24 +41,24 @@ fixable = [
# "A", ## flake8-builtins
# "B", ## flake8-bugbear
"C",
"D", ## pydocstyle
"E", ## pycodestyle-error
"D", ## pydocstyle
"E", ## pycodestyle-error
# "F", ## pyflakes
# "G", ## flake8-logging-format
"I", ## isort
"N", ## pep8-naming
"I", ## isort
"N", ## pep8-naming
# "Q", ## flake8-quotas
# "S", ## flake8-bandit
"T",
"W", ## pycodestyle-warning
"W", ## pycodestyle-warning
# "ANN", ## flake8-annotations
# "ARG", ## flake8-unused-arguments
# "BLE", ## flake8-blind-except
# "COM", ## flake8-commas
# "DJ", ## flake8-django
# "DTZ", ## flake8-datetimez
# "EM", ## flake8-errmsg
"ERA", ## eradicate
"ERA", ## eradicate
# "EXE", ## flake8-executables
# "FBT", ## flake8-boolean-trap
# "ICN", ## flake8-imort-conventions
@@ -66,18 +68,18 @@
# "PD", ## pandas-vet
# "PGH", ## pygrep-hooks
# "PIE", ## flake8-pie
"PL", ## pylint
"PL", ## pylint
# "PT", ## flake8-pytest-style
# "PTH", ## flake8-use-pathlib
# "PYI", ## flake8-pyi
# "RET", ## flake8-return
# "RSE", ## flake8-raise
"RUF", ## ruf-specific rules
"RUF", ## ruf-specific rules
# "SIM", ## flake8-simplify
# "SLF", ## flake8-self
# "TCH", ## flake8-type-checking
"TID", ## flake8-tidy-imports
"TRY", ## tryceratops
"TID", ## flake8-tidy-imports
"TRY", ## tryceratops
"UP", ## pyupgrade
# "YTT" ## flake8-2020
]
@@ -93,7 +95,7 @@ exclude = [
".venv",
"__pypackages__",
"__pycache__",
"*.pyc"
"*.pyc",
]

[per-file-ignores]
@@ -120,4 +122,10 @@ relative-imports-order = "closest-to-furthest"
## Automatically add imports below to top of files
required-imports = ["from __future__ import annotations"]
## Define isort section priority
section-order = ["future", "standard-library", "first-party", "local-folder", "third-party"]
section-order = [
"future",
"standard-library",
"first-party",
"local-folder",
"third-party",
]
1 change: 1 addition & 0 deletions src/red_utils/core/dataclass_utils/mixins/mixin_classes.py
@@ -39,6 +39,7 @@ def as_dict(self: Generic[T]):
Returns
-------
A Python `dict` representation of a Python `dataclass` class.
"""
try:
return self.__dict__.copy()
10 changes: 10 additions & 0 deletions src/red_utils/ext/dataframe_utils/pandas_utils/operations.py
@@ -26,6 +26,7 @@ def get_oldest_newest(
-------
(pandas.Series|pandas.DataFrame): A Pandas `DataFrame` or `Series` containing oldest & newest records
in the input `DataFrame`.
"""
if df is None or df.empty:
raise ValueError("Missing or empty DataFrame")
@@ -73,6 +74,7 @@ def rename_df_cols(
Returns
-------
(pandas.DataFrame): A renamed Pandas `DataFrame`.
"""
if col_rename_map is None:
msg = ValueError("No col_rename_map passed")
@@ -107,6 +109,7 @@ def count_df_rows(df: pd.DataFrame = None) -> int:
Returns
-------
(int): Count of rows in a `DataFrame`
"""
if df is not None:
if df.empty:
@@ -134,6 +137,7 @@ def load_pqs_to_df(
Returns
-------
(list[pandas.DataFrame]): A list of Pandas `DataFrame`s created from files in `search_dir`
"""
if search_dir is None:
raise ValueError("Missing a directory to search")
@@ -184,6 +188,7 @@ def convert_csv_to_pq(
Returns
-------
(bool): `True` if `csv_file` is converted to `pq_file` successfully
"""
if csv_file is None:
raise ValueError("Missing a CSV input file to read from")
@@ -236,6 +241,7 @@ def convert_pq_to_csv(
Returns
-------
(bool): `True` if `pq_file` is converted to `csv_file` successfully
"""
if csv_file is None:
raise ValueError("Missing a CSV file to save to")
@@ -277,6 +283,7 @@ def load_pq(pq_file: Union[str, Path] = None) -> pd.DataFrame:
Returns
-------
(pandas.DataFrame): A Pandas `DataFrame` loaded from a `.parquet` file
"""
if pq_file is None:
raise ValueError("Missing pq_file to load")
@@ -325,6 +332,7 @@ def save_pq(
-------
(bool): `True` if `DataFrame` is saved to `pq_file` successfully
(bool): `False` if `DataFrame` is not saved to `pq_file` successfully
"""
if df is None or df.empty:
msg = ValueError("DataFrame is None or empty")
@@ -377,6 +385,7 @@ def load_csv(csv_file: Union[str, Path] = None, delimiter: str = ",") -> pd.DataFrame:
Returns
-------
(pandas.DataFrame): A Pandas `DataFrame` with data loaded from the `csv_file`
"""
if csv_file is None:
raise ValueError("Missing output path")
@@ -433,6 +442,7 @@ def save_csv(
-------
(bool): `True` if `DataFrame` is saved to `csv_file` successfully
(bool): `False` if `DataFrame` is not saved to `csv_file` successfully
"""
if df is None or df.empty:
msg = ValueError("DataFrame is None or empty")
@@ -35,6 +35,7 @@ def validate_df_col_type(col_type: str = None) -> str:
Returns
-------
(str): The validated `col_type`
"""
if col_type is None:
raise ValueError("Missing a column type to validate")