From 919a987814a9d5771233e4a2ec4c3446490b1c94 Mon Sep 17 00:00:00 2001 From: Paul Haesler Date: Thu, 14 Nov 2024 15:04:39 +1100 Subject: [PATCH] Webmercator anti-meridian handling (#1083) * Hackathon WIP * Fix Docker image for debugging. * Additional docker fixes from #1080 * Tweak antimeridian hack and timezone handling. * Misc cleanup. * Fix tests - auto-publish 3832 if 3857 is published. * Fix tests - pass boxed point instead of raw point. * What layers are getting hidden? * Layer reprs. * Typo from a couple of commits ago. * REALLY fix typo (sigh) * Workaround core bug and cleanup * print to echo fix * Remove separate test db build script. --- .env_ows_root | 4 +- .env_simple | 4 +- .../workflows/complementary-config-test.yaml | 2 +- .github/workflows/test.yml | 4 +- Dockerfile | 11 +- build-test-db.sh | 153 ------------------ check-code-all.sh | 3 +- datacube_ows/feature_info.py | 10 +- datacube_ows/index/postgis/api.py | 11 +- datacube_ows/index/postgis/product_ranges.py | 10 +- datacube_ows/ows_configuration.py | 21 ++- datacube_ows/startup_utils.py | 3 +- datacube_ows/time_utils.py | 14 +- datacube_ows/update_ranges_impl.py | 22 +-- datacube_ows/wms_utils.py | 35 ++-- docker-compose.yaml | 8 +- integration_tests/cfg/ows_test_cfg.py | 2 +- integration_tests/test_mv_index.py | 1 + integration_tests/test_wcs_server.py | 4 +- setup.py | 7 +- tests/test_wms_utils.py | 16 +- 21 files changed, 108 insertions(+), 237 deletions(-) delete mode 100755 build-test-db.sh diff --git a/.env_ows_root b/.env_ows_root index 044f908bc..7410ce533 100644 --- a/.env_ows_root +++ b/.env_ows_root @@ -12,7 +12,7 @@ DB_DATABASE=opendatacube ################# # OWS CFG Config ################# -PYTHONPATH=/env/config +PYTHONPATH=/src/config DATACUBE_OWS_CFG=ows_refactored.ows_root_cfg.ows_cfg ################ @@ -21,7 +21,7 @@ DATACUBE_OWS_CFG=ows_refactored.ows_root_cfg.ows_cfg # OWS_CFG_DIR config enables mounting an external CFG folder OWS_CFG_DIR=~/dea-config/dev/services/wms/ows_refactored # OWS_CFG_MOUNT_DIR defines the mount inside docker container -OWS_CFG_MOUNT_DIR=/env/config/ows_refactored +OWS_CFG_MOUNT_DIR=/src/config/ows_refactored ################ # AWS S3 Config diff --git a/.env_simple b/.env_simple index 50b94b065..63a028e36 100644 --- a/.env_simple +++ b/.env_simple @@ -19,7 +19,7 @@ POSTGRES_DB="odc_postgres,odc_postgis" # OWS CFG Config ################# DATACUBE_OWS_CFG=config.ows_test_cfg.ows_cfg -PYTHONPATH=/env +PYTHONPATH=/src ################ # Docker Volume @@ -27,7 +27,7 @@ PYTHONPATH=/env # OWS_CFG_DIR config enables mounting an external CFG folder OWS_CFG_DIR=./integration_tests/cfg # OWS_CFG_MOUNT_DIR defines the mount inside docker container -OWS_CFG_MOUNT_DIR=/env/config +OWS_CFG_MOUNT_DIR=/src/config ################ # AWS S3 Config diff --git a/.github/workflows/complementary-config-test.yaml b/.github/workflows/complementary-config-test.yaml index 4488c0f7b..ac68352f6 100644 --- a/.github/workflows/complementary-config-test.yaml +++ b/.github/workflows/complementary-config-test.yaml @@ -63,5 +63,5 @@ jobs: docker compose -f docker-compose.yaml -f docker-compose.cleandb.yaml up -d --wait docker compose -f docker-compose.yaml -f docker-compose.cleandb.yaml exec -T ows /bin/sh -c "datacube system init; datacube system check" docker compose -f docker-compose.yaml -f docker-compose.cleandb.yaml exec -T ows /bin/sh -c "curl https://raw.githubusercontent.com/GeoscienceAustralia/dea-config/master/dev/services/wms/inventory.json -o /tmp/inventory.json" - docker compose -f 
docker-compose.yaml -f docker-compose.db.yaml exec -T ows /bin/sh -c "cd /code && ./compare-cfg.sh" + docker compose -f docker-compose.yaml -f docker-compose.db.yaml exec -T ows /bin/sh -c "cd /src && ./compare-cfg.sh" docker compose -f docker-compose.yaml -f docker-compose.cleandb.yaml down diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index c22819957..0a37e9e94 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -55,7 +55,7 @@ jobs: - name: Test and lint dev OWS image run: | mkdir artifacts - docker run -e LOCAL_UID=$(id -u $USER) -e LOCAL_GID=$(id -g $USER) -v ${PWD}/artifacts:/mnt/artifacts ${ORG}/${IMAGE}:_builder /bin/sh -c "cd /code && ./check-code.sh" + docker run -e LOCAL_UID=$(id -u $USER) -e LOCAL_GID=$(id -g $USER) -v ${PWD}/artifacts:/mnt/artifacts ${ORG}/${IMAGE}:_builder /bin/sh -c "cd /src && ./check-code.sh" mv ./artifacts/coverage.xml ./artifacts/coverage-unit.xml - name: Dockerized Integration Pytest @@ -64,7 +64,7 @@ jobs: export LOCAL_GID=$(id -g $USER) export $(grep -v '^#' .env_simple | xargs) docker compose -f docker-compose.yaml -f docker-compose.db.yaml up -d --wait --build - docker compose -f docker-compose.yaml -f docker-compose.db.yaml exec -T ows /bin/sh -c "cd /code && ./check-code-all.sh" + docker compose -f docker-compose.yaml -f docker-compose.db.yaml exec -T ows /bin/sh -c "cd /src && ./check-code-all.sh" docker compose -f docker-compose.yaml -f docker-compose.db.yaml down - name: Upload All coverage to Codecov diff --git a/Dockerfile b/Dockerfile index bf69f07bc..623ab635f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,5 +1,5 @@ # Note that this is now pinned to a fixed version. Remember to check for new versions periodically. -FROM ghcr.io/osgeo/gdal:ubuntu-small-3.9.1 AS builder +FROM ghcr.io/osgeo/gdal:ubuntu-small-3.9.2 AS builder # Environment is test or deployment. ARG ENVIRONMENT=deployment @@ -26,7 +26,7 @@ RUN python3 -m pip --disable-pip-version-check -q wheel --no-binary psycopg2 psy python3 -m pip --disable-pip-version-check -q wheel --no-binary pyproj pyproj) # Should match builder base. -FROM ghcr.io/osgeo/gdal:ubuntu-small-3.9.1 +FROM ghcr.io/osgeo/gdal:ubuntu-small-3.9.2 # Environment is test or deployment. ARG ENVIRONMENT=deployment @@ -40,6 +40,7 @@ RUN export DEBIAN_FRONTEND=noninteractive \ && ([ "$ENVIRONMENT" = "deployment" ] || \ apt-get install -y --no-install-recommends \ proj-bin) \ + && apt-get upgrade -y \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* /var/dpkg/* /var/tmp/* /var/log/dpkg.log @@ -47,8 +48,8 @@ RUN export DEBIAN_FRONTEND=noninteractive \ COPY --chown=root:root --link docker/files/remap-user.sh /usr/local/bin/remap-user.sh # Copy source code and install it -WORKDIR /code -COPY . /code +WORKDIR /src +COPY . /src ## Only install pydev requirements if arg PYDEV_DEBUG is set to 'yes' ARG PYDEV_DEBUG="no" @@ -62,7 +63,7 @@ RUN EXTRAS=$([ "$ENVIRONMENT" = "deployment" ] || echo ",test") && \ python3 -m pip --disable-pip-version-check install --no-cache-dir .[dev] --break-system-packages) && \ python3 -m pip freeze && \ ([ "$ENVIRONMENT" != "deployment" ] || \ - (rm -rf /code/* /code/.git* && \ + (rm -rf /src/* /src/.git* && \ apt-get purge -y \ git \ git-man \ diff --git a/build-test-db.sh b/build-test-db.sh deleted file mode 100755 index e2df9a974..000000000 --- a/build-test-db.sh +++ /dev/null @@ -1,153 +0,0 @@ -#!/usr/bin/env bash -# Convenience script for running Travis-like checks. 
-set -ex
-
-# ensure db is ready
-sh ./docker/ows/wait-for-db
-
-# Initialise ODC schemas
-
-datacube system init
-datacube -E owspostgis system init
-
-# Add extended metadata types
-
-datacube metadata add https://raw.githubusercontent.com/GeoscienceAustralia/dea-config/master/product_metadata/eo3_landsat_ard.odc-type.yaml
-datacube metadata add https://raw.githubusercontent.com/GeoscienceAustralia/dea-config/master/product_metadata/eo3_sentinel_ard.odc-type.yaml
-
-datacube -E owspostgis metadata add https://raw.githubusercontent.com/GeoscienceAustralia/dea-config/master/product_metadata/eo3_landsat_ard.odc-type.yaml
-datacube -E owspostgis metadata add https://raw.githubusercontent.com/GeoscienceAustralia/dea-config/master/product_metadata/eo3_sentinel_ard.odc-type.yaml
-
-# Test products
-datacube product add ./integration_tests/metadata/s2_l2a_prod.yaml
-datacube product add https://raw.githubusercontent.com/GeoscienceAustralia/dea-config/master/products/baseline_satellite_data/c3/ga_s2am_ard_3.odc-product.yaml
-datacube product add https://raw.githubusercontent.com/GeoscienceAustralia/dea-config/master/products/baseline_satellite_data/c3/ga_s2bm_ard_3.odc-product.yaml
-datacube product add https://raw.githubusercontent.com/GeoscienceAustralia/dea-config/master/products/land_and_vegetation/c3_fc/ga_ls_fc_3.odc-product.yaml
-
-datacube -E owspostgis product add ./integration_tests/metadata/s2_l2a_prod.yaml
-datacube -E owspostgis product add https://raw.githubusercontent.com/GeoscienceAustralia/dea-config/master/products/baseline_satellite_data/c3/ga_s2am_ard_3.odc-product.yaml
-datacube -E owspostgis product add https://raw.githubusercontent.com/GeoscienceAustralia/dea-config/master/products/baseline_satellite_data/c3/ga_s2bm_ard_3.odc-product.yaml
-datacube -E owspostgis product add https://raw.githubusercontent.com/GeoscienceAustralia/dea-config/master/products/land_and_vegetation/c3_fc/ga_ls_fc_3.odc-product.yaml
-
-# add flag masking products
-datacube product add ./integration_tests/metadata/product_geodata_coast_100k.yaml
-datacube product add https://raw.githubusercontent.com/GeoscienceAustralia/dea-config/master/products/inland_water/c3_wo/ga_ls_wo_3.odc-product.yaml
-
-datacube -E owspostgis product add ./integration_tests/metadata/product_geodata_coast_100k.yaml
-datacube -E owspostgis product add https://raw.githubusercontent.com/GeoscienceAustralia/dea-config/master/products/inland_water/c3_wo/ga_ls_wo_3.odc-product.yaml
-
-# Geomedian for summary product testing
-
-datacube product add https://raw.githubusercontent.com/GeoscienceAustralia/dea-config/master/products/baseline_satellite_data/geomedian-au/ga_ls8c_nbart_gm_cyear_3.odc-product.yaml
-datacube -E owspostgis product add https://raw.githubusercontent.com/GeoscienceAustralia/dea-config/master/products/baseline_satellite_data/geomedian-au/ga_ls8c_nbart_gm_cyear_3.odc-product.yaml
-
-# S2 datasets from us-west-2 and eo3ified geodata_coast
-MDL=./integration_tests/metadata
-python ${MDL}/metadata_importer.py < [...]
[...]
diff --git a/check-code-all.sh b/check-code-all.sh
[...]
diff --git a/datacube_ows/feature_info.py b/datacube_ows/feature_info.py
[...]
 @log_call
-def _make_derived_band_dict(pixel_dataset: xarray.Dataset, style_index: dict[str, StyleDef]) -> dict[str, int | float | str]:
+def _make_derived_band_dict(
+    pixel_dataset: xarray.Dataset, style_index: dict[str, StyleDef]
+) -> dict[str, str | int | float]:
     """Creates a dict of values for bands derived by styles.

     This only works for styles with an `index_function` defined.
@@ -120,7 +122,7 @@ def _make_derived_band_dict(pixel_dataset: xarray.Dataset, style_index: dict[str :param dict(str, StyleCfg) style_index: dict of style configuration dicts :return: dict of style names to derived value """ - derived_band_dict = {} + derived_band_dict: dict[str, int | float | str] = {} for style_name, style in style_index.items(): if not style.include_in_feature_info: continue @@ -158,7 +160,7 @@ def feature_info(args: dict[str, str]) -> FlaskResponse: stacker = DataStacker(params.layer, geo_point_geobox, params.times) # --- Begin code section requiring datacube. cfg = get_config() - all_time_datasets = stacker.datasets_all_time(point=geo_point) + all_time_datasets = stacker.datasets_all_time(point=geo_point_geobox.extent) # Taking the data as a single point so our indexes into the data should be 0,0 h_coord = cast(str, cfg.published_CRSs[params.crsid]["horizontal_coord"]) @@ -174,7 +176,7 @@ def feature_info(args: dict[str, str]) -> FlaskResponse: global_info_written = False feature_json["data"] = [] fi_date_index: dict[datetime, RAW_CFG] = {} - time_datasets = stacker.datasets(all_flag_bands=True, point=geo_point) + time_datasets = stacker.datasets(all_flag_bands=True, point=geo_point_geobox.extent) data = stacker.data(time_datasets, skip_corrections=True) if data is not None: for dt in data.time.values: diff --git a/datacube_ows/index/postgis/api.py b/datacube_ows/index/postgis/api.py index 3e260d302..58a6de671 100644 --- a/datacube_ows/index/postgis/api.py +++ b/datacube_ows/index/postgis/api.py @@ -14,6 +14,7 @@ from odc.geo import Geometry, CRS from datacube import Datacube from datacube.model import Product, Dataset, Range +from antimeridian import fix_shape from datacube_ows.ows_configuration import OWSNamedLayer from datacube_ows.index.api import OWSAbstractIndex, OWSAbstractIndexDriver, LayerSignature, LayerExtent, TimeSearchTerm @@ -61,7 +62,15 @@ def _query(self, ) -> dict[str, Any]: query: dict[str, Any] = {} if geom: - query["geopolygon"] = self._prep_geom(layer, geom) + if geom.crs and geom.crs in layer.dc.index.spatial_indexes(): + query["geopolygon"] = geom + else: + # Default to 4326 and take a long hard look at yourself. + prepared_geom = self._prep_geom(layer, geom) + assert prepared_geom is not None + geopoly = prepared_geom.to_crs("epsg:4326") + geopoly = Geometry(fix_shape(geopoly.geom), crs="epsg:4326") + query["geopolygon"] = geopoly if products is not None: query["product"] = [p.name for p in products] if times is not None: diff --git a/datacube_ows/index/postgis/product_ranges.py b/datacube_ows/index/postgis/product_ranges.py index 430b39c5c..76cdb4d5f 100644 --- a/datacube_ows/index/postgis/product_ranges.py +++ b/datacube_ows/index/postgis/product_ranges.py @@ -156,14 +156,14 @@ def create_range_entry(layer: OWSNamedLayer, cache: dict[LayerSignature, list[st # Get extent polygon from materialised views base_crs = CRS(layer.native_CRS) - if base_crs not in layer.dc.index.spatial_indexes(): - click.echo(f"Native CRS for layer {layer.name} ({layer.native_CRS}) does not have a spatial index. " - "Using epsg:4326 for extent calculations.") - base_crs = CRS("EPSG:4326") - base_extent = None for product in layer.products: prod_extent = layer.dc.index.products.spatial_extent(product, base_crs) + if prod_extent is None: + # Workaround - this should be handled in core. 
+ prod_extent = layer.dc.index.products.spatial_extent(product) + if prod_extent is not None: + prod_extent = prod_extent.to_crs(base_crs) if base_extent is None: base_extent = prod_extent else: diff --git a/datacube_ows/ows_configuration.py b/datacube_ows/ows_configuration.py index ca8a15e97..9447a3d1a 100644 --- a/datacube_ows/ows_configuration.py +++ b/datacube_ows/ows_configuration.py @@ -396,6 +396,9 @@ def make_ready(self, *args, **kwargs) -> None: self.unready_layers = still_unready super().make_ready(*args, **kwargs) + def __repr__(self) -> str: + return f"OWS Folder <{self.title}>" + class TimeRes(Enum): SUBDAY = "subday" @@ -1041,6 +1044,9 @@ def lookup_impl(cls, cfg: "OWSConfig", keyvals: dict[str, str], subs: CFG_DICT | except KeyError: raise OWSEntryNotFound(f"Layer {keyvals['layer']} not found") + def __repr__(self) -> str: + return f"OWS Layer <{self.name}>" + class OWSProductLayer(OWSNamedLayer): multi_product = False @@ -1311,7 +1317,7 @@ def make_ready(self, *args: Any, **kwargs: Any) -> None: try: self.dc: Datacube = Datacube(env=self.default_env, app=self.odc_app) except Exception as e: - _LOG.error("ODC initialisation failed: %s", str(e)) + _LOG.error("ODC initialisation of env %s failed: %s", self.default_env._name, str(e)) raise ODCInitException(e) if self.msg_file_name: try: @@ -1433,6 +1439,19 @@ def make_gml_name(name): else: self.default_geographic_CRS = geographic_CRSs[0] + if "EPSG:3857" not in self.published_CRSs: + _LOG.warning("EPSG:3857 (Web mercator) is not a published CRS") + elif "EPSG:3832" not in self.published_CRSs: + # Have web merc but not Pacific Web merc - just add it silently. + self.published_CRSs["EPSG:3832"] = { + "geographic": False, + "horizontal_coord": "x", + "vertical_coord": "y", + "vertical_coord_first": False, + "gml_name": make_gml_name("EPSG:3832"), + "alias_of": None + } + for alias, alias_def in CRS_aliases.items(): target_crs = cast(str, alias_def["alias"]) if target_crs not in self.published_CRSs: diff --git a/datacube_ows/startup_utils.py b/datacube_ows/startup_utils.py index 04d0a87b6..aa50fd2c3 100644 --- a/datacube_ows/startup_utils.py +++ b/datacube_ows/startup_utils.py @@ -178,7 +178,8 @@ def parse_config_file(log=None): def initialise_flask(name): - app = Flask(name.split('.')[0]) + app_path = os.path.dirname(os.path.abspath(__file__)) + app = Flask(name.split('.')[0], template_folder=os.path.join(app_path, 'templates')) return app def pass_through(undecorated): diff --git a/datacube_ows/time_utils.py b/datacube_ows/time_utils.py index 6e5f27955..c916d14ae 100644 --- a/datacube_ows/time_utils.py +++ b/datacube_ows/time_utils.py @@ -112,7 +112,7 @@ def local_solar_date_range(geobox: GeoBox, date: datetime.date) -> tuple[datetim :param date: A date object :return: A tuple of two UTC datetime objects, spanning 1 second shy of 24 hours. """ - tz: datetime.tzinfo = tz_for_geometry(geobox.geographic_extent) + tz: datetime.tzinfo = tz_for_geometry(geobox.extent) start = datetime.datetime(date.year, date.month, date.day, 0, 0, 0, tzinfo=tz) end = datetime.datetime(date.year, date.month, date.day, 23, 59, 59, tzinfo=tz) return (start.astimezone(utc), end.astimezone(utc)) @@ -174,20 +174,14 @@ def tz_for_geometry(geom: Geometry) -> datetime.tzinfo: :return: A timezone object """ crs_geo = CRS("EPSG:4326") - geo_geom: Geometry = geom.to_crs(crs_geo) - centroid: Geometry = geo_geom.centroid + raw_centroid = geom.centroid + centroid: Geometry = raw_centroid.to_crs(crs_geo) try: # 1. 
Try being smart with the centroid of the geometry return tz_for_coord(centroid.coords[0][0], centroid.coords[0][1]) except NoTimezoneException: pass - for pt in geo_geom.boundary.coords: - try: - # 2. Try being smart all the points in the geometry - return tz_for_coord(pt[0], pt[1]) - except NoTimezoneException: - pass - # 3. Meh, just use longitude + # 2. Meh, just use longitude offset = round(centroid.coords[0][0] / 15.0) return datetime.timezone(datetime.timedelta(hours=offset)) diff --git a/datacube_ows/update_ranges_impl.py b/datacube_ows/update_ranges_impl.py index 5f5318956..0c66c1ff7 100755 --- a/datacube_ows/update_ranges_impl.py +++ b/datacube_ows/update_ranges_impl.py @@ -94,31 +94,31 @@ def main(layers: list[str], """ # --version if version: - print("Open Data Cube Open Web Services (datacube-ows) version", __version__) + click.echo(f"Open Data Cube Open Web Services (datacube-ows) version {__version__}") sys.exit(0) # Handle old-style calls if not layers: layers = [] if schema and layers: - print("Sorry, cannot update the schema and ranges in the same invocation.") + click.echo("Sorry, cannot update the schema and ranges in the same invocation.") sys.exit(1) if schema and views: - print("Sorry, No point in updating materialised views and updating the schema in the same invocation.") + click.echo("Sorry, No point in updating materialised views and updating the schema in the same invocation.") sys.exit(1) elif cleanup and layers: - print("Sorry, cannot cleanup 1.8.x database entities and update ranges in the same invocation.") + click.echo("Sorry, cannot cleanup 1.8.x database entities and update ranges in the same invocation.") sys.exit(1) elif views and cleanup: - print("Sorry, cannot update the materialised views and cleanup the database in the same invocation.") + click.echo("Sorry, cannot update the materialised views and cleanup the database in the same invocation.") sys.exit(1) elif views and layers: - print("Sorry, cannot update the materialised views and ranges in the same invocation.") + click.echo("Sorry, cannot update the materialised views and ranges in the same invocation.") sys.exit(1) elif read_role and (views or layers): - print("Sorry, read-role can't be granted with view or range updates") + click.echo("Sorry, read-role can't be granted with view or range updates") sys.exit(1) elif write_role and (views or layers): - print("Sorry, write-role can't be granted with view or range updates") + click.echo("Sorry, write-role can't be granted with view or range updates") sys.exit(1) initialise_debugging() @@ -161,7 +161,7 @@ def main(layers: list[str], sys.exit(1) return 0 - print("Deriving extents from materialised views") + click.echo("Deriving extents from materialised views and/or spatial indexes") try: errors = add_ranges(cfg, layers) click.echo("Done.") @@ -174,7 +174,7 @@ def main(layers: list[str], elif isinstance(e.orig, psycopg2.errors.NotNullViolation): click.echo("ERROR: OWS materialised views are most likely missing a newly indexed product") click.echo("") - click.echo(" Try running with the --viewes options first.") + click.echo(" Try running with the --views options first.") sys.exit(1) else: raise e @@ -197,5 +197,5 @@ def add_ranges(cfg: OWSConfig, layer_names: list[str]) -> bool: layer = cfg.layer_index[name] layer.ows_index().create_range_entry(layer, cache) - print("Done.") + click.echo("Done.") return errors diff --git a/datacube_ows/wms_utils.py b/datacube_ows/wms_utils.py index f142ccbcb..39bb7c8c1 100644 --- a/datacube_ows/wms_utils.py +++ 
b/datacube_ows/wms_utils.py @@ -26,6 +26,7 @@ from datacube_ows.styles.expression import ExpressionException from datacube_ows.utils import default_to_utc, find_matching_date + RESAMPLING_METHODS = { 'nearest': Resampling.nearest, 'cubic': Resampling.cubic, @@ -67,23 +68,25 @@ def _get_geobox_xy(args, crs): return minx, miny, maxx, maxy -def _get_geobox(args, src_crs, dst_crs=None): +def _get_geobox(args, crs): width = int(args['width']) height = int(args['height']) - minx, miny, maxx, maxy = _get_geobox_xy(args, src_crs) + minx, miny, maxx, maxy = _get_geobox_xy(args, crs) if minx == maxx or miny == maxy: raise WMSException("Bounding box must enclose a non-zero area") - if dst_crs is not None: - minx, miny, maxx, maxy = _bounding_pts( - minx, miny, - maxx, maxy, - src_crs, dst_crs=dst_crs - ) - out_crs = src_crs if dst_crs is None else dst_crs + if crs.epsg == 3857 and (maxx < -13_000_000 or minx > 13_000_000): + # EPSG:3857 query AND closer to the anti-meridian than the prime meridian: + # re-project to epsg:3832 (Pacific Web-Mercator) + ll = geom.point(x=minx, y=miny, crs=crs).to_crs("epsg:3832") + ur = geom.point(x=maxx, y=maxy, crs=crs).to_crs("epsg:3832") + minx, miny = ll.coords[0] + maxx, maxy = ur.coords[0] + crs = geom.CRS("epsg:3832") + return create_geobox( - out_crs, + crs, minx, miny, maxx, maxy, width, height ) @@ -106,11 +109,11 @@ def zoom_factor(args, crs): # Project to a geographic coordinate system # This is why we can't just use the regular geobox. The scale needs to be # "standardised" in some sense, not dependent on the CRS of the request. - geo_crs = geom.CRS("EPSG:4326") + # TODO: can we do better in polar regions? minx, miny, maxx, maxy = _bounding_pts( minx, miny, maxx, maxy, - crs, dst_crs=geo_crs + crs, dst_crs="epsg:4326" ) # Create geobox affine transformation (N.B. 
Don't need an actual Geobox) affine = Affine.translation(minx, miny) * Affine.scale((maxx - minx) / width, (maxy - miny) / height) @@ -312,6 +315,11 @@ def __init__(self, args): self.geometry = _get_polygon(args, self.crs) # BBox, height and width parameters self.geobox = _get_geobox(args, self.crs) + # Web-merc antimeridian hack: + if self.geobox.crs != self.crs: + self.crs = self.geobox.crs + self.geometry = self.geometry.to_crs(self.crs) + # Time parameter self.times = get_times(args, self.layer) @@ -489,8 +497,7 @@ def solar_correct_data(data, dataset): native_x = (dataset.bounds.right + dataset.bounds.left) / 2.0 native_y = (dataset.bounds.top + dataset.bounds.bottom) / 2.0 pt = geom.point(native_x, native_y, dataset.crs) - crs_geo = geom.CRS("EPSG:4326") - geo_pt = pt.to_crs(crs_geo) + geo_pt = pt.to_crs("epsg:4326") data_time = dataset.center_time.astimezone(utc) data_lon, data_lat = geo_pt.coords[0] diff --git a/docker-compose.yaml b/docker-compose.yaml index 9fdf0f6ef..7a30b721e 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -24,13 +24,15 @@ services: ODC_DEFAULT_INDEX_DRIVER: postgres ODC_OWSPOSTGIS_INDEX_DRIVER: postgis # Please switch to single entry url configuration for postgres url + ODC_ODC2_INDEX_DRIVER: ${ODC_ODC2_INDEX_DRIVER} + ODC_ODC2_DB_URL: ${ODC_ODC2_DB_URL} ODC_DEFAULT_DB_URL: ${ODC_DEFAULT_DB_URL} ODC_OWSPOSTGIS_DB_URL: ${ODC_OWSPOSTGIS_DB_URL} # for wait-for-db check POSTGRES_USER: ${POSTGRES_USER} POSTGRES_HOSTNAME: ${POSTGRES_HOSTNAME} SERVER_DB_USERNAME: ${SERVER_DB_USERNAME} - # Path from the PYTHONPATH to the config object (default PYTHONPATH is /env) + # Path from the PYTHONPATH to the config object (default PYTHONPATH is /src) PYTHONPATH: ${PYTHONPATH} DATACUBE_OWS_CFG: ${DATACUBE_OWS_CFG} AWS_DEFAULT_REGION: ${AWS_REGION} @@ -39,13 +41,13 @@ services: # Enable Metrics prometheus_multiproc_dir: ${prometheus_multiproc_dir} # Dev flags - FLASK_APP: /code/datacube_ows/ogc.py + FLASK_APP: /src/datacube_ows/ogc.py FLASK_ENV: ${FLASK_ENV} PYDEV_DEBUG: "${PYDEV_DEBUG}" SENTRY_DSN: "${SENTRY_DSN}" volumes: - ${OWS_CFG_DIR}:${OWS_CFG_MOUNT_DIR} - - ./:/code/ + - ./:/src/ - ./artifacts:/mnt/artifacts restart: always command: ["flask", "run", "--host=0.0.0.0", "--port=8000"] diff --git a/integration_tests/cfg/ows_test_cfg.py b/integration_tests/cfg/ows_test_cfg.py index 2b1b6b9a1..3eb993f58 100644 --- a/integration_tests/cfg/ows_test_cfg.py +++ b/integration_tests/cfg/ows_test_cfg.py @@ -13,7 +13,7 @@ trans_dir = "." else: cfgbase = "config." 
- trans_dir = "/code" + trans_dir = "/src" # THIS IS A TESTING FILE diff --git a/integration_tests/test_mv_index.py b/integration_tests/test_mv_index.py index ea158d004..42b0503ae 100644 --- a/integration_tests/test_mv_index.py +++ b/integration_tests/test_mv_index.py @@ -47,6 +47,7 @@ def __init__(self, geom): if geom.crs != "EPSG:4326": geom = geom.to_crs("EPSG:4326") self.geographic_extent = geom + self.extent = geom def test_time_search(): diff --git a/integration_tests/test_wcs_server.py b/integration_tests/test_wcs_server.py index 4a845b6fd..70e132242 100644 --- a/integration_tests/test_wcs_server.py +++ b/integration_tests/test_wcs_server.py @@ -1194,7 +1194,9 @@ def test_wcs20_getcoverage_multidate_netcdf(ows_server): # Ensure that we have at least some layers available contents = list(wcs.contents) - assert len(contents) == 12 + for layer_cfg in cfg.layer_index.values(): + assert not layer_cfg.hide + assert len(contents) == len(list(cfg.active_products)) for i in (0, 11): layer = cfg.layer_index[contents[i]] extent = ODCExtent(layer) diff --git a/setup.py b/setup.py index c0c23ad80..02fb8f7a6 100644 --- a/setup.py +++ b/setup.py @@ -10,7 +10,7 @@ from setuptools import find_packages, setup install_requirements = [ - 'datacube[performance,s3]>=1.9.0-rc9', + 'datacube[performance,s3]>=1.9.0-rc11', 'flask', 'requests', 'affine', @@ -22,6 +22,7 @@ 'importlib_metadata', 'matplotlib', 'pyparsing', + 'antimeridian', 'numpy>=1.22', 'scipy', 'Pillow>=10.2.0', @@ -51,17 +52,15 @@ ] dev_requirements = [ - 'pydevd-pycharm~=232.10072.31', + 'pydevd-pycharm~=242.23339.19', 'pylint', 'sphinx_click', 'pre-commit', - 'pipdeptree', 'mypy', 'flake8', 'types-pytz', 'types-python-dateutil', 'types-requests', - ] operational_requirements = [ diff --git a/tests/test_wms_utils.py b/tests/test_wms_utils.py index ed6bf456f..a09b2f5e4 100644 --- a/tests/test_wms_utils.py +++ b/tests/test_wms_utils.py @@ -262,23 +262,11 @@ def test_get_geobox(): "height": "256", "bbox": "-43.28507087113431,146.18504300790977,-43.07072582535469,146.64289867785524", }, - src_crs=CRS("EPSG:4326") + crs=CRS("EPSG:4326") ) assert gbox.affine assert str(gbox.crs) == "EPSG:4326" - gbox = datacube_ows.wms_utils._get_geobox( - args={ - "width": "256", - "height": "256", - "bbox": "-43.28507087113431,146.18504300790977,-43.07072582535469,146.64289867785524", - }, - src_crs=CRS("EPSG:4326"), - dst_crs=CRS("EPSG:3857") - ) - assert gbox.affine - assert str(gbox.crs) == "EPSG:3857" - with pytest.raises(WMSException) as e: gbox = datacube_ows.wms_utils._get_geobox( args={ @@ -286,7 +274,7 @@ def test_get_geobox(): "height": "256", "bbox": "-43.28507087113431,146.18504300790977,-43.28507087113431,146.64289867785524", }, - src_crs = CRS("EPSG:4326") + crs = CRS("EPSG:4326") ) assert "Bounding box must enclose a non-zero area" in str(e.value) OWSConfig._instance = None
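---

Notes on the key changes above, with illustrative sketches. None of the code below is part of the patch itself.

The heart of the change is in wms_utils._get_geobox: an EPSG:3857 request whose bounding box hugs the anti-meridian is quietly re-expressed in EPSG:3832 (Pacific Web-Mercator). A stand-alone sketch of that logic, assuming odc-geo; the function name and free-standing form are illustrative (in the patch this lives inline in _get_geobox):

    from odc.geo import geom

    def bbox_to_pacific_web_mercator(minx, miny, maxx, maxy, crs):
        # EPSG:3857 puts its discontinuity at the anti-meridian; EPSG:3832
        # puts it at the prime meridian, so Pacific bboxes become contiguous.
        if crs.epsg == 3857 and (maxx < -13_000_000 or minx > 13_000_000):
            # Re-project the bbox corners into Pacific Web-Mercator.
            ll = geom.point(x=minx, y=miny, crs=crs).to_crs("epsg:3832")
            ur = geom.point(x=maxx, y=maxy, crs=crs).to_crs("epsg:3832")
            minx, miny = ll.coords[0]
            maxx, maxy = ur.coords[0]
            crs = geom.CRS("epsg:3832")
        return minx, miny, maxx, maxy, crs

Web-Mercator x spans roughly +/-20,037,508 m, so the +/-13,000,000 m cut-off only fires for boxes well past half-way to the anti-meridian; ordinary requests keep their original CRS. GetMapParameters then syncs self.crs and reprojects self.geometry whenever the geobox comes back in a different CRS than was requested.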
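datacube_ows/index/postgis/api.py handles the search side: a query polygon reprojected into EPSG:4326 may now cross longitude +/-180, and the new antimeridian dependency (added in setup.py) repairs it. A sketch of just the normalisation step, assuming the antimeridian package and odc-geo; the wrapper function is illustrative:

    from antimeridian import fix_shape
    from odc.geo import Geometry

    def normalise_search_polygon(geopoly: Geometry) -> Geometry:
        # Fall back to geographic coordinates for the search...
        geopoly = geopoly.to_crs("epsg:4326")
        # ...then split/repair any ring that wraps across longitude 180,
        # so the polygon stays valid for the spatial query.
        return Geometry(fix_shape(geopoly.geom), crs="epsg:4326")

As in the patch, fix_shape is fed the underlying shapely geometry (geopoly.geom) and its GeoJSON-like result is re-wrapped in an odc-geo Geometry with an explicit CRS. Note that this branch is only taken when the query CRS has no spatial index of its own.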
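The range-update workaround in index/postgis/product_ranges.py reads, in isolation: ask core for a product's extent in the layer's native CRS; if that returns None (the core bug noted in the comment), ask again without a CRS and reproject the answer. A sketch assuming an ODC 1.9 index object; the helper name and the Geometry.union accumulation step are assumptions (the hunk context cuts off before showing how extents are combined):

    def combined_layer_extent(index, products, base_crs):
        base_extent = None
        for product in products:
            prod_extent = index.products.spatial_extent(product, base_crs)
            if prod_extent is None:
                # Workaround for the core bug: retry in the index's
                # default CRS and reproject the result ourselves.
                prod_extent = index.products.spatial_extent(product)
                if prod_extent is not None:
                    prod_extent = prod_extent.to_crs(base_crs)
            if prod_extent is None:
                continue
            # Accumulate the union of extents across the layer's products
            # (assumes odc-geo Geometry.union).
            base_extent = prod_extent if base_extent is None else base_extent.union(prod_extent)
        return base_extent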
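The ows_configuration.py change guarantees the reprojection target exists: if a config publishes EPSG:3857 but not EPSG:3832, the Pacific variant is injected silently. The same logic as a stand-alone function (make_gml_name is the helper already defined in ows_configuration.py; the function wrapper itself is illustrative):

    import logging

    _LOG = logging.getLogger(__name__)

    def ensure_pacific_web_mercator(published_crss: dict, make_gml_name) -> None:
        if "EPSG:3857" not in published_crss:
            _LOG.warning("EPSG:3857 (Web mercator) is not a published CRS")
        elif "EPSG:3832" not in published_crss:
            # Have Web-Mercator but not Pacific Web-Mercator: add it silently.
            published_crss["EPSG:3832"] = {
                "geographic": False,
                "horizontal_coord": "x",
                "vertical_coord": "y",
                "vertical_coord_first": False,
                "gml_name": make_gml_name("EPSG:3832"),
                "alias_of": None,
            }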
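time_utils.tz_for_geometry is simplified: the centroid (reprojected to EPSG:4326) is tried against the timezone lookup once, and on NoTimezoneException the code now falls straight back to a longitude-derived fixed offset instead of retrying every boundary point. The fallback itself is plain stdlib:

    import datetime

    def tz_from_longitude(lon: float) -> datetime.tzinfo:
        # One hour of offset per 15 degrees of longitude, rounded.
        return datetime.timezone(datetime.timedelta(hours=round(lon / 15.0)))

    # e.g. tz_from_longitude(151.2) gives UTC+10:00 (roughly Sydney, ignoring DST)

The related one-line fix in local_solar_date_range passes geobox.extent rather than geobox.geographic_extent, since tz_for_geometry now does its own reprojection of the centroid.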
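Finally, the print-to-click.echo sweep in update_ranges_impl.py keeps all CLI output going through Click's I/O layer, which is unicode-safe across platforms and consistent with the rest of the command's output handling. A minimal illustration of the pattern; the version string here is a placeholder, not the real __version__:

    import sys

    import click

    @click.command()
    @click.option("--version", is_flag=True, help="Print version string and exit.")
    def main(version: bool):
        if version:
            click.echo("datacube-ows version 0.0.0")  # placeholder, not the real version
            sys.exit(0)
        click.echo("Deriving extents from materialised views and/or spatial indexes")

    if __name__ == "__main__":
        main()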