Merge pull request #84 from GeoscienceAustralia/intertidal_updates
Intertidal updates for latest Sandbox image (to merge when released)
erialC-P authored May 10, 2024
2 parents 331aac1 + 094f893 commit 00fdde3
Showing 9 changed files with 515 additions and 660 deletions.
15 changes: 9 additions & 6 deletions intertidal/elevation.py
@@ -412,19 +412,22 @@ def pixel_dem(
    if interp_intervals is not None:
        print(f"Applying tidal interval interpolation to {interp_intervals} intervals")
        interval_ds = interval_ds.interp(
-            interval=np.linspace(0, interval_ds.interval.max(), interp_intervals),
+            coords={
+                "interval": np.linspace(0, interval_ds.interval.max(), interp_intervals)
+            },
            method="linear",
-        )
+            # Required as recent versions of xarray return new coord as a variable
+        ).set_coords("interval")

    # Smooth tidal intervals using a rolling mean
    if smooth_radius is not None:
        print(f"Applying rolling mean smoothing with radius {smooth_radius}")
        smoothed_ds = interval_ds.rolling(
            interval=smooth_radius,
            center=False,
-            min_periods=int(smooth_radius / 2.0)
-            if min_periods == "auto"
-            else min_periods,
+            min_periods=(
+                int(smooth_radius / 2.0) if min_periods == "auto" else min_periods
+            ),
        ).mean()
    else:
        smoothed_ds = interval_ds
@@ -1171,7 +1174,7 @@ def intertidal_cli(
    input_params = locals()
    run_id = f"[{output_version}] [{label_date}] [{study_area}]"
    log = configure_logging(run_id)

    # Record params in logs
    log.info(f"{run_id}: Using parameters {input_params}")
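Context for the elevation.py change above: with newer xarray releases, Dataset.interp() can hand back the interpolated "interval" coordinate as a plain data variable, so the code now promotes it back to a coordinate with set_coords(). Below is a minimal, self-contained sketch of the same interpolate-then-smooth pattern; the toy dataset and values are invented for illustration and are not the real intertidal intervals:

import numpy as np
import xarray as xr

# Toy stand-in for the per-pixel tidal interval dataset (values are made up)
interval_ds = xr.Dataset(
    {"elevation": ("interval", np.array([0.2, 0.5, 0.9, 1.4]))},
    coords={"interval": np.array([0.0, 1.0, 2.0, 3.0])},
)

# Interpolate onto a finer set of tidal intervals, then promote "interval"
# back to a coordinate, as recent xarray versions may return it as a variable
interp_intervals = 10
interval_ds = interval_ds.interp(
    coords={"interval": np.linspace(0, interval_ds.interval.max(), interp_intervals)},
    method="linear",
).set_coords("interval")

# Smooth along the interval dimension with a rolling mean, mirroring the
# smooth_radius / min_periods="auto" logic in pixel_dem
smooth_radius, min_periods = 3, "auto"
smoothed_ds = interval_ds.rolling(
    interval=smooth_radius,
    center=False,
    min_periods=int(smooth_radius / 2.0) if min_periods == "auto" else min_periods,
).mean()

With min_periods set this way, the rolling mean still returns values near the start of the interval range, where fewer than smooth_radius samples are available.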
1,056 changes: 451 additions & 605 deletions notebooks/Intertidal_elevation_stac.ipynb

Large diffs are not rendered by default.

40 changes: 20 additions & 20 deletions requirements.in
@@ -1,33 +1,33 @@
--extra-index-url=https://packages.dea.ga.gov.au/
aiohttp
botocore
-click==8.1.3
-datacube[s3,performance]==1.8.13
-dea-tools==0.3.2.dev55
-eodatasets3==0.30.4
-geopandas==0.13.2
-matplotlib==3.7.1
+click==8.1.7
+datacube[s3,performance]==1.8.18
+dea-tools==0.3.2
+eodatasets3==0.30.5
+geopandas==0.14.3
+matplotlib==3.8.4
mdutils
-numpy==1.24.3
+numpy==1.26.4
odc-algo==0.2.3
odc-geo==0.4.3
odc-ui
-pyTMD==2.0.5
-pandas==1.5.3
+pandas==2.2.2
pygeos==0.14
-pyproj==3.4.1
+pyproj==3.6.1
pytest
pytest-dependency
pytest-cov
-pytz==2023.3
-rioxarray
-rasterio==1.3.4
-seaborn==0.13.0
-scikit_image==0.19.3
-scikit_learn==1.2.2
-scipy==1.10.1
+pyTMD==2.1.0
+pytz==2024.1
+rioxarray==0.15.5
+rasterio==1.3.8
+seaborn==0.13.2
+scikit-image==0.22.0
+scikit-learn==1.4.2
+scipy==1.13.0
sunriset==1.0
-Shapely==2.0.1
-tqdm==4.65.0
-xarray==2023.1.0
+shapely==2.0.1
+tqdm==4.66.2
+xarray==2024.3.0
xskillscore==0.0.24
51 changes: 28 additions & 23 deletions requirements.txt
@@ -94,7 +94,7 @@ ciso8601==2.3.1
# datacube
# dea-tools
# eodatasets3
-click==8.1.3
+click==8.1.7
# via
# -r requirements.in
# click-plugins
@@ -152,7 +152,7 @@ dask-image==2023.3.0
# via odc-algo
dask-ml==1.0.0
# via dea-tools
-datacube[performance,s3]==1.8.13
+datacube[performance,s3]==1.8.18
# via
# -r requirements.in
# datacube-ows
@@ -162,21 +162,23 @@ datacube[performance,s3]==1.8.13
# odc-ui
datacube-ows==1.8.39
# via dea-tools
-dea-tools==0.3.2.dev54
+dea-tools==0.3.2
# via -r requirements.in
decorator==5.1.1
# via ipython
deepdiff==6.7.1
# via datacube-ows
defusedxml==0.7.1
# via eodatasets3
+deprecat==2.1.1
+# via datacube
distributed==2024.3.1
# via
# dask-glm
# dask-ml
# datacube
# odc-algo
-eodatasets3==0.30.4
+eodatasets3==0.30.5
# via -r requirements.in
exceptiongroup==1.2.0
# via
@@ -216,7 +218,7 @@ geoalchemy2==0.14.6
# datacube-ows
geographiclib==2.0
# via geopy
-geopandas==0.13.2
+geopandas==0.14.3
# via
# -r requirements.in
# dea-tools
@@ -291,6 +293,8 @@ lark==1.1.9
# via
# datacube
# datacube-ows
+lazy-loader==0.4
+# via scikit-image
llvmlite==0.42.0
# via numba
locket==1.0.0
@@ -308,7 +312,7 @@ markupsafe==2.1.5
# via
# jinja2
# werkzeug
-matplotlib==3.7.1
+matplotlib==3.8.4
# via
# -r requirements.in
# datacube-ows
@@ -342,7 +346,7 @@ numba==0.59.1
# xskillscore
numexpr==2.9.0
# via odc-algo
-numpy==1.24.3
+numpy==1.26.4
# via
# -r requirements.in
# bottleneck
@@ -372,7 +376,6 @@ numpy==1.24.3
# properscoring
# pygeos
# pytmd
-# pywavelets
# rasterio
# rasterstats
# rioxarray
@@ -416,14 +419,15 @@ packaging==24.0
# distributed
# geoalchemy2
# geopandas
+# lazy-loader
# matplotlib
# planetary-computer
# pytest
# rioxarray
# scikit-image
# setuptools-scm
# xarray
-pandas==1.5.3
+pandas==2.2.2
# via
# -r requirements.in
# dask-ml
@@ -488,7 +492,7 @@ pyparsing==3.1.2
# datacube-ows
# matplotlib
# snuggs
-pyproj==3.4.1
+pyproj==3.6.1
# via
# -r requirements.in
# datacube
@@ -535,11 +539,11 @@ python-rapidjson==1.16
# via eodatasets3
python-slugify==8.0.4
# via datacube-ows
-pytmd==2.0.5
+pytmd==2.1.0
# via
# -r requirements.in
# dea-tools
-pytz==2023.3
+pytz==2024.1
# via
# -r requirements.in
# datacube-ows
@@ -549,16 +553,13 @@ pytz==2023.3
# pandas
# planetary-computer
# sunriset
-pywavelets==1.5.0
-# via scikit-image
pyyaml==6.0.1
# via
# dask
# datacube
# distributed
# owslib
# pytmd
-rasterio==1.3.4
+rasterio==1.3.8
# via
# -r requirements.in
# datacube
@@ -586,7 +587,7 @@ requests==2.31.0
# owslib
# planetary-computer
# pystac-client
-rioxarray==0.15.1
+rioxarray==0.15.5
# via
# -r requirements.in
# dea-tools
@@ -602,19 +603,19 @@ ruamel-yaml-clib==0.2.8
# via ruamel-yaml
s3transfer==0.10.1
# via boto3
-scikit-image==0.19.3
+scikit-image==0.22.0
# via
# -r requirements.in
# dea-tools
# odc-algo
-scikit-learn==1.2.2
+scikit-learn==1.4.2
# via
# -r requirements.in
# dask-glm
# dask-ml
# dea-tools
# xskillscore
-scipy==1.10.1
+scipy==1.13.0
# via
# -r requirements.in
# dask-glm
@@ -630,7 +631,7 @@ scipy==1.10.1
# scikit-learn
# sparse
# xskillscore
-seaborn==0.13.0
+seaborn==0.13.2
# via -r requirements.in
setuptools-scm==8.0.4
# via
@@ -698,7 +699,7 @@ toolz==0.12.1
# xskillscore
tornado==6.4
# via distributed
-tqdm==4.65.0
+tqdm==4.66.2
# via
# -r requirements.in
# dea-tools
@@ -717,6 +718,8 @@ typing-extensions==4.10.0
# pydantic
# pydantic-core
# setuptools-scm
+tzdata==2024.1
+# via pandas
urllib3==2.2.1
# via
# botocore
@@ -728,7 +731,9 @@ werkzeug==3.0.1
# via flask
widgetsnbextension==4.0.10
# via ipywidgets
-xarray==2023.1.0
+wrapt==1.16.0
+# via deprecat
+xarray==2024.3.0
# via
# -r requirements.in
# datacube
2 changes: 1 addition & 1 deletion tests/README.md
@@ -10,7 +10,7 @@ Integration tests
This directory contains tests that are run to verify that DEA Intertidal code runs correctly. The ``test_intertidal.py`` file runs a small-scale full workflow analysis over an intertidal flat in the Gulf of Carpentaria using the DEA Intertidal [Command Line Interface (CLI) tools](../notebooks/Intertidal_CLI.ipynb), and compares these results against a LiDAR validation DEM to produce some simple accuracy metrics.

-The latest integration test completed at **2024-04-12 10:45**. Compared to the previous run, it had an:
+The latest integration test completed at **2024-05-07 10:13**. Compared to the previous run, it had an:
- RMSE accuracy of **0.14 m ( :heavy_minus_sign: no change)**
- MAE accuracy of **0.12 m ( :heavy_minus_sign: no change)**
- Bias of **0.12 m ( :heavy_minus_sign: no change)**
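For readers skimming the metrics in the README above: RMSE, MAE and bias compare the modelled intertidal elevations against the LiDAR validation DEM pixel by pixel, and can be reproduced with a few lines of NumPy. This is only an illustrative sketch with made-up numbers, not the repository's actual validation code:

import numpy as np

# Hypothetical modelled and LiDAR validation elevations (metres) at matching pixels
modelled = np.array([0.45, 0.61, 0.72, 0.88, 1.02])
validation = np.array([0.32, 0.50, 0.59, 0.75, 0.91])

error = modelled - validation
rmse = np.sqrt(np.mean(error**2))  # root mean square error
mae = np.mean(np.abs(error))       # mean absolute error
bias = np.mean(error)              # positive bias = modelled surface sits too high

print(f"RMSE {rmse:.2f} m, MAE {mae:.2f} m, Bias {bias:.2f} m")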
Binary file not shown.
8 changes: 3 additions & 5 deletions tests/test_intertidal.py
@@ -6,6 +6,7 @@
import rioxarray
import numpy as np
import pandas as pd
+import xarray as xr
import seaborn as sns
from mdutils import Html
from mdutils.mdutils import MdUtils
@@ -23,12 +24,9 @@
@pytest.fixture()
def satellite_ds():
    """
-    Loads a timeseries of satellite data from a .pickle file.
-    TODO: Replace this with data loaded directly from datacube
-    after adding access to prod database.
+    Loads a pre-generated timeseries of satellite data from NetCDF.
    """
-    with open("tests/data/satellite_ds.pickle", "rb") as handle:
-        return pickle.load(handle)
+    return xr.open_dataset("tests/data/satellite_ds.nc")


@pytest.mark.dependency()
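The updated fixture reads tests/data/satellite_ds.nc rather than unpickling a Python object, which keeps the test data readable by any NetCDF-aware tool. If the fixture file ever needs regenerating, any loaded xarray.Dataset can be written back out in the same format; the snippet below is a hedged sketch with a dummy dataset, not the actual data-generation step behind this commit:

import numpy as np
import pandas as pd
import xarray as xr

# Dummy stand-in for the real satellite time series; in practice this would
# come from a datacube or STAC load rather than being built by hand
satellite_ds = xr.Dataset(
    {"ndwi": (("time", "y", "x"), np.random.rand(3, 2, 2))},
    coords={
        "time": pd.date_range("2022-01-01", periods=3),
        "y": [0.0, 30.0],
        "x": [0.0, 30.0],
    },
)

# Write the NetCDF fixture consumed by the tests, then read it back the same
# way the satellite_ds() fixture does
satellite_ds.to_netcdf("tests/data/satellite_ds.nc")
satellite_ds = xr.open_dataset("tests/data/satellite_ds.nc")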
3 changes: 3 additions & 0 deletions tests/validation.csv
@@ -55,3 +55,6 @@ time,Correlation,RMSE,MAE,R-squared,Bias,Regression slope
2024-03-25 06:38:58.505117+00:00,0.975,0.141,0.121,0.95,0.116,1.11
2024-03-26 00:54:18.363134+00:00,0.975,0.141,0.121,0.95,0.116,1.11
2024-04-12 00:45:53.360679+00:00,0.975,0.141,0.121,0.764,0.116,1.11
+2024-04-23 05:49:24.645404+00:00,0.975,0.141,0.121,0.95,0.116,1.11
+2024-05-06 22:50:22.431050+00:00,0.975,0.141,0.121,0.95,0.116,1.11
+2024-05-07 00:13:21.589490+00:00,0.975,0.141,0.121,0.95,0.116,1.11
Binary file modified tests/validation.jpg
