Merge pull request #66 from GeoscienceAustralia/update_mask_funcs
Update ancillary data loading functions
vnewey authored Mar 4, 2024
2 parents 71dcd3e + 19fd698 commit cbdc270
Showing 8 changed files with 1,458 additions and 609 deletions.
7 changes: 2 additions & 5 deletions .github/workflows/dea-intertidal-image.yml
@@ -32,7 +32,7 @@ on:
- 'codecov.yaml'

release:
types: [created, edited, published]
types: [edited, published]

permissions:
id-token: write # Required for requesting Json web token
@@ -107,13 +107,10 @@ jobs:
cp ./artifacts/validation.csv ./tests/validation.csv
cp ./artifacts/README.md ./tests/README.md
# - name: Setup upterm session
# uses: lhotari/action-upterm@v1

# Commit validation results produced by integration tests back into repo
- name: Commit validation results into repository
uses: stefanzweifel/git-auto-commit-action@v4
if: github.event_name != 'release'
if: github.event_name == 'pull_request'
with:
commit_message: Automatically update integration test validation results
file_pattern: 'tests/validation.jpg tests/validation.csv tests/README.md'
19 changes: 8 additions & 11 deletions intertidal/elevation.py
@@ -20,8 +20,8 @@

from intertidal.io import (
load_data,
load_aclum,
load_topobathy,
load_topobathy_mask,
load_aclum_mask,
prepare_for_export,
tidal_metadata,
export_dataset_metadata,
@@ -1103,14 +1103,11 @@ def intertidal_cli(
)
satellite_ds.load()

# Load data from GA's AusBathyTopo 250m 2023 Grid
topobathy_ds = load_topobathy(
dc, satellite_ds, product="ga_ausbathytopo250m_2023", resampling="bilinear"
)
valid_mask = topobathy_ds.height_depth > -15
# Load topobathy mask from GA's AusBathyTopo 250m 2023 Grid
topobathy_mask = load_topobathy_mask(dc, satellite_ds.odc.geobox.compat)

# Load and reclassify for intensive urban land use class only the ABARES ACLUM ds
reclassified_aclum = load_aclum(dc, satellite_ds)
# Load urban land use class mask from ABARES CLUM
reclassified_aclum = load_aclum_mask(dc, satellite_ds.odc.geobox.compat)

# Also load ancillary dataset IDs to use in metadata
# (both layers are continental products with only
@@ -1123,7 +1120,7 @@
log.info(f"{run_id}: Calculating Intertidal Elevation")
ds, tide_m = elevation(
satellite_ds,
valid_mask=valid_mask,
valid_mask=topobathy_mask,
ndwi_thresh=ndwi_thresh,
min_freq=min_freq,
max_freq=max_freq,
@@ -1187,7 +1184,7 @@ def intertidal_cli(
# Prepare data for export
ds["qa_ndwi_freq"] *= 100 # Convert frequency to %
ds_prepared = prepare_for_export(ds) # sets correct dtypes and nodata

# Calculate additional tile-level tidal metadata attributes
metadata_dict = tidal_metadata(ds)

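Taken together, the elevation.py changes swap the old DEM-plus-threshold pattern for the two new mask loaders. A minimal sketch of the updated calling pattern, assuming an existing Datacube instance `dc` and an already-loaded `satellite_ds`; the remaining `elevation()` keyword arguments are omitted for brevity and would be supplied as in `intertidal_cli`:

```python
from intertidal.io import load_topobathy_mask, load_aclum_mask
from intertidal.elevation import elevation

# `dc` is an existing datacube.Datacube instance and `satellite_ds` an
# already-loaded satellite dataset (e.g. from intertidal.io.load_data)
geobox = satellite_ds.odc.geobox.compat  # datacube-compatible GeoBox of the data

# The updated loaders take the GeoBox rather than the satellite dataset itself
topobathy_mask = load_topobathy_mask(dc, geobox)      # True where elevation > -15 m
reclassified_aclum = load_aclum_mask(dc, geobox)      # True for intensive urban pixels

# The topobathy mask replaces the old `topobathy_ds.height_depth > -15` threshold
ds, tide_m = elevation(
    satellite_ds,
    valid_mask=topobathy_mask,
    # ndwi_thresh, min_freq, max_freq, ... as in intertidal_cli (omitted here)
)
```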
73 changes: 50 additions & 23 deletions intertidal/io.py
@@ -460,12 +460,15 @@ def load_data(
return satellite_ds, dss_s2, dss_ls


def load_topobathy(
def load_topobathy_mask(
dc,
satellite_ds,
geobox,
product="ga_ausbathytopo250m_2023",
elevation_band="height_depth",
resampling="bilinear",
mask_invalid=True,
min_threshold=-15,
mask_filters=[("dilation", 25)],
):
"""
Loads a topo-bathymetric DEM for the extents of the loaded satellite
@@ -476,38 +479,58 @@
----------
dc : Datacube
A Datacube instance for loading data.
satellite_ds : ndarray
The loaded satellite data, used to obtain the spatial extents
of the data.
geobox : GeoBox
The GeoBox of the loaded satellite data, used to ensure the data
is loaded into the same pixel grid (e.g. resolution, extents, CRS).
product : str, optional
The name of the topo-bathymetric DEM product to load from the
datacube. Defaults to "ga_ausbathytopo250m_2023".
elevation_band : str, optional
The name of the band containing elevation data. Defaults to
"height_depth".
resampling : str, optional
The resampling method to use, by default "bilinear".
mask_invalid : bool, optional
Whether to mask invalid/nodata values in the array by setting
them to NaN, by default True.
min_threshold : int or float, optional
The elevation value used to create the mask; all pixels with
elevations above this value will be given a value of True.
mask_filters : list of tuples, optional
An optional list of morphological processing steps to pass to
the `mask_cleanup` function. The default is `[("dilation", 25)]`,
which will dilate True pixels by a radius of 25 pixels (~250 m).
Returns
-------
topobathy_ds : xarray.Dataset
The loaded topo-bathymetric DEM.
topobathy_mask : xarray.DataArray
An output boolean mask, where True represents pixels to use in the
following analysis.
"""
topobathy_ds = dc.load(
product=product, like=satellite_ds.odc.geobox.compat, resampling=resampling
).squeeze("time")
# Load from datacube, reprojecting to GeoBox of input satellite data
topobathy_ds = dc.load(product=product, like=geobox, resampling=resampling).squeeze(
"time"
)

# Mask invalid data
if mask_invalid:
topobathy_ds = mask_invalid_data(topobathy_ds)

return topobathy_ds
# Threshold to minimum elevation
topobathy_mask = topobathy_ds[elevation_band] > min_threshold

# If requested, apply cleanup
if mask_filters is not None:
topobathy_mask = mask_cleanup(topobathy_mask, mask_filters=mask_filters)

return topobathy_mask
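The new behaviour is a threshold followed by an optional morphological cleanup. A standalone sketch of the same two steps, using scikit-image's dilation as a stand-in for the project's `mask_cleanup` helper (which is not reproduced here), with a toy elevation surface rather than the AusBathyTopo grid:

```python
import numpy as np
from skimage.morphology import binary_dilation, disk

# Toy elevation surface: deep water on the left, land on the right
elevation = np.tile(np.linspace(-30, 10, 200), (100, 1))

# Step 1: threshold, mirroring `topobathy_ds[elevation_band] > min_threshold`
mask = elevation > -15

# Step 2: dilate True pixels by a 25-pixel radius, the same effect the default
# mask_filters=[("dilation", 25)] requests from the mask_cleanup helper
dilated = binary_dilation(mask, disk(25))

print(mask.sum(), dilated.sum())  # dilation can only grow the True region
```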


def load_aclum(
def load_aclum_mask(
dc,
satellite_ds,
geobox,
product="abares_clum_2020",
class_band="alum_class",
resampling="nearest",
mask_invalid=True,
):
@@ -521,12 +544,15 @@ def load_aclum(
----------
dc : Datacube
A Datacube instance for loading data.
satellite_ds : ndarray
The loaded satellite data, used to obtain the spatial extents
of the data.
geobox : GeoBox
The GeoBox of the loaded satellite data, used to ensure the data
is loaded into the same pixel grid (e.g. resolution, extents, CRS).
product : str, optional
The name of the ABARES land use dataset product to load from the
datacube. Defaults to "abares_clum_2020".
class_band : str, optional
The name of the band containing land use class data. Defaults to
"alum_class".
resampling : str, optional
The resampling method to use, by default "nearest".
mask_invalid : bool, optional
@@ -535,22 +561,23 @@
Returns
-------
reclassified_aclum : xarray.Dataset
The ABARES land use mask, summarised to include only two land
use classes: 'intensive urban' and 'other'.
reclassified_aclum : xarray.DataArray
An output boolean mask, where True equals intensive urban and
False equals all other classes.
"""
# Load from datacube, reprojecting to GeoBox of input satellite data
aclum_ds = dc.load(
product=product, like=satellite_ds.odc.geobox.compat, resampling=resampling
product=product, like=geobox, resampling=resampling
).squeeze("time")

# Mask invalid data
if mask_invalid:
aclum_ds = mask_invalid_data(aclum_ds)

# Manually isolate the 'intensive urban' land use summary class, set
# all other pixels to false. For class definitions, refer to
# all other pixels to False. For class definitions, refer to
# gdata1/data/land_use/ABARES_CLUM/geotiff_clum_50m1220m/Land use, 18-class summary.qml)
reclassified_aclum = aclum_ds.alum_class.isin(
reclassified_aclum = aclum_ds[class_band].isin(
[
500,
530,
@@ -926,7 +953,7 @@ def export_dataset_metadata(
Dataset maturity to use for the output dataset. Default is
"final", can also be "interim".
additional_metadata : dict, optional
An option dictionary containing additional metadata fields to
An optional dictionary containing additional metadata fields to
add to the dataset metadata properties.
debug : bool, optional
When true, this will write S3 outputs locally so they can be
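For completeness, a small usage sketch of the renamed `load_aclum_mask`, assuming `dc` and `satellite_ds` already exist; the final masking line is illustrative only, with `some_da` standing in for any DataArray on the same grid:

```python
# `dc` and `satellite_ds` are assumed to already exist
urban_mask = load_aclum_mask(dc, satellite_ds.odc.geobox.compat)

# The result is a boolean DataArray: True = intensive urban, False = everything else
n_urban = int(urban_mask.sum())
print(f"{n_urban} of {urban_mask.size} pixels classified as intensive urban")

# Typical use: exclude urban pixels from an analysis array on the same grid
# filtered = some_da.where(~urban_mask)
```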