diff --git a/intertidal/elevation.py b/intertidal/elevation.py
index 4458c6c..8eae174 100644
--- a/intertidal/elevation.py
+++ b/intertidal/elevation.py
@@ -1232,7 +1232,8 @@ def intertidal_cli(
         ds_prepared = prepare_for_export(ds)  # sets correct dtypes and nodata

         # Calculate additional tile-level tidal metadata attributes
-        metadata_dict = tidal_metadata(ds)
+        # (requires exposure/offsets to have been calculated)
+        metadata_dict = tidal_metadata(ds) if exposure_offsets else None

         # Export data and metadata
         export_dataset_metadata(
diff --git a/intertidal/io.py b/intertidal/io.py
index ea763c1..de4907d 100644
--- a/intertidal/io.py
+++ b/intertidal/io.py
@@ -7,6 +7,7 @@
 import xarray as xr
 from pathlib import Path
 from urllib.parse import urlparse
+from rasterio.enums import Resampling
 from rasterio.errors import NotGeoreferencedWarning

 import datacube
@@ -486,7 +487,7 @@
         The name of the topo-bathymetric DEM product to load from the
         datacube. Defaults to "ga_ausbathytopo250m_2023".
     elevation_band : str, optional
-        The name of the band containing elevation data. Defaults to 
+        The name of the band containing elevation data. Defaults to
         "height_depth".
     resampling : str, optional
         The resampling method to use, by default "bilinear".
@@ -497,7 +498,7 @@
         The elevation value used to create the mask; all pixels with
         elevations above this value will be given a value of True.
     mask_filters : list of tuples, optional
-        An optional list of morphological processing steps to pass to 
+        An optional list of morphological processing steps to pass to
         the `mask_cleanup` function. The default is `[("dilation", 25)]`,
         which will dilate True pixels by a radius of 25 pixels (~250 m).

@@ -562,13 +563,13 @@ def load_aclum_mask(
     Returns
     -------
     reclassified_aclum : xarray.DataArray
-        An output boolean mask, where True equals intensive urban and 
+        An output boolean mask, where True equals intensive urban and
         False equals all other classes.
     """
     # Load from datacube, reprojecting to GeoBox of input satellite data
-    aclum_ds = dc.load(
-        product=product, like=geobox, resampling=resampling
-    ).squeeze("time")
+    aclum_ds = dc.load(product=product, like=geobox, resampling=resampling).squeeze(
+        "time"
+    )

     # Mask invalid data
     if mask_invalid:
@@ -815,8 +816,8 @@ def _ls_platform_instrument(year):
 def prepare_for_export(
     ds,
     int_bands=None,
-    int_nodata=-999,
-    int_dtype=np.int16,
+    int_nodata=255,
+    int_dtype=np.uint8,
     float_dtype=np.float32,
     output_location=None,
     overwrite=True,
@@ -837,10 +838,10 @@
         "offset_hightide", "offset_lowtide", "spread")
     int_nodata : int, optional
         An integer that represents nodata values for integer bands
-        (default is -999).
+        (default is 255).
     int_dtype : string or numpy data type, optional
         The data type to use for integer layers (default is
-        np.int16).
+        np.uint8).
     float_dtype : string or numpy data type, optional
         The data type to use for floating point layers (default is
         np.float32).
@@ -860,7 +861,7 @@ def prepare_for_export(
     def _prepare_band(
         band, int_bands, int_nodata, int_dtype, float_dtype, output_location, overwrite
     ):
-        # Export specific bands as integer16 data types by first filling
+        # Export specific bands as integer data types by first filling
         # NaN with nodata value before converting to int, then setting
         # nodata attribute on layer
         if band.name in int_bands:
@@ -1026,14 +1027,14 @@ def export_dataset_metadata(
         label_parts[-2] = time_convention
         dataset_assembler.names.dataset_label = "_".join(label_parts)

-        # Write measurements from xarray, extracting nodata values
-        # from each input array and assigning these on the outputs
-        for dataarray in ds:
-            log.info(f"{run_id}: Writing array {dataarray}")
-            nodata = ds[dataarray].attrs.get("nodata", None)
-            dataset_assembler.write_measurements_odc_xarray(
-                ds[[dataarray]], nodata=nodata
-            )
+        # Write measurements from xarray (this will loop through each
+        # array in the dataset and export them with correct nodata values)
+        log.info(f"{run_id}: Writing output arrays")
+        dataset_assembler.write_measurements_odc_xarray(
+            ds,
+            overviews=(2, 4, 8, 16, 32),
+            overview_resampling=Resampling.nearest,
+        )

         # Add lineage
         s2_set = set(d.id for d in s2_lineage) if s2_lineage else []
diff --git a/metadata/ga_s2ls_intertidal_cyear_3.odc-product.yaml b/metadata/ga_s2ls_intertidal_cyear_3.odc-product.yaml
index 617ab30..5978349 100644
--- a/metadata/ga_s2ls_intertidal_cyear_3.odc-product.yaml
+++ b/metadata/ga_s2ls_intertidal_cyear_3.odc-product.yaml
@@ -27,25 +27,25 @@ measurements:
       - uncertainty

   - name: exposure
-    dtype: int16
+    dtype: uint8
     units: "percent"
-    nodata: -999
+    nodata: 255

   - name: extents
-    dtype: int16
+    dtype: uint8
     units: "class"
-    nodata: -999
+    nodata: 255
     flags_definition:
       extents:
         description: Intertidal extents class
         bits: [0, 1, 2, 3, 4, 5, 6, 7]
         values:
-          0: test0
-          1: test1
-          2: test2
-          3: test3
-          4: test4
-          5: test4
+          0: Dry
+          1: Inland intermittent wet
+          2: Inland persistent wet
+          3: Tidal influenced persistent wet
+          4: Intertidal low confidence
+          5: Intertidal high confidence

   - name: ta_hat
     dtype: float32
@@ -75,23 +75,23 @@ measurements:
     aliases:
       - lat

   - name: ta_offset_high
-    dtype: int16
+    dtype: uint8
     units: "percent"
-    nodata: -999
+    nodata: 255
     aliases:
       - offset_high

   - name: ta_offset_low
-    dtype: int16
+    dtype: uint8
     units: "percent"
-    nodata: -999
+    nodata: 255
     aliases:
       - offset_low

   - name: ta_spread
-    dtype: int16
+    dtype: uint8
     units: "percent"
-    nodata: -999
+    nodata: 255
     aliases:
       - spread
@@ -103,9 +103,9 @@
       - ndwi_corr

   - name: qa_ndwi_freq
-    dtype: int16
+    dtype: uint8
     units: "percent"
-    nodata: -999
+    nodata: 255
     aliases:
       - ndwi_freq

diff --git a/tests/README.md b/tests/README.md
index bab1b51..0583d92 100644
--- a/tests/README.md
+++ b/tests/README.md
@@ -10,7 +10,7 @@ Integration tests

 This directory contains tests that are run to verify that DEA Intertidal code runs correctly. The ``test_intertidal.py`` file runs a small-scale full workflow analysis over an intertidal flat in the Gulf of Carpentaria using the DEA Intertidal [Command Line Interface (CLI) tools](../notebooks/Intertidal_CLI.ipynb), and compares these results against a LiDAR validation DEM to produce some simple accuracy metrics.

-The latest integration test completed at **2024-03-12 16:16**. Compared to the previous run, it had an:
+The latest integration test completed at **2024-03-13 11:54**. Compared to the previous run, it had an:
 - RMSE accuracy of **0.14 m ( :heavy_minus_sign: no change)**
 - MAE accuracy of **0.12 m ( :heavy_minus_sign: no change)**
 - Bias of **0.12 m ( :heavy_minus_sign: no change)**
diff --git a/tests/validation.csv b/tests/validation.csv
index 0272267..c64aa48 100644
--- a/tests/validation.csv
+++ b/tests/validation.csv
@@ -44,3 +44,4 @@ time,Correlation,RMSE,MAE,R-squared,Bias,Regression slope
 2024-03-07 07:52:01.276183+00:00,0.977,0.147,0.126,0.955,0.121,1.125
 2024-03-08 04:13:32.992746+00:00,0.975,0.141,0.121,0.95,0.116,1.11
 2024-03-12 05:16:34.997844+00:00,0.975,0.141,0.121,0.95,0.116,1.11
+2024-03-13 00:54:01.731099+00:00,0.975,0.141,0.121,0.95,0.116,1.11
diff --git a/tests/validation.jpg b/tests/validation.jpg
index 362be4b..08fd238 100644
Binary files a/tests/validation.jpg and b/tests/validation.jpg differ
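
For reference, a minimal sketch of the integer-band handling that the `_prepare_band` comment above describes (fill NaN with the nodata value, cast to the integer dtype, then record nodata as an attribute on the layer), using the new uint8/255 defaults. This is illustrative only and not part of the patch; the `exposure` array below is a made-up example.

import numpy as np
import xarray as xr

# Illustrative values matching the new defaults in prepare_for_export
int_nodata, int_dtype = 255, np.uint8

# A made-up "exposure" layer containing a NaN gap
exposure = xr.DataArray(
    np.array([[12.0, np.nan], [87.0, 100.0]]),
    dims=("y", "x"),
    name="exposure",
)

# Fill NaN with the nodata value before converting to int, then set
# the nodata attribute on the layer (mirrors the _prepare_band comment)
exposure_int = exposure.fillna(int_nodata).astype(int_dtype)
exposure_int.attrs["nodata"] = int_nodata

print(exposure_int.values)  # [[ 12 255] [ 87 100]]
print(exposure_int.dtype)   # uint8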