From f7840cb892a622c82e1c587c0bbc826ce558187f Mon Sep 17 00:00:00 2001
From: Tjalling-dejong <93266159+Tjalling-dejong@users.noreply.github.com>
Date: Tue, 3 Sep 2024 16:24:08 +0200
Subject: [PATCH] Add gfm (#40)
* restructured providers
* moved auth to providers
* expanded gfm class
* WIP fixing auth for gfm api
* fixed gfm api call for posting aoi
* added available_data method to GFM
* add select_data method
* added data export for GFM
* added tests for GFM workflow
* WIP on GFM.view_data()
* added view_data method to gfm class
* making eo-floods ruff compliant
* added docstrings and typing
* added docstrings and typing to leaflet.py
* added __init__.py
* added typing and docstrings to auth.py
* added support for authenticating with environment variables
* updated gfm tests
* added testpath and ruff exclude
* improved typing
* improved structure
* improved hydrafloods auth
* added docstring
* updated example notebook
---
EO_Floods/__init__.py | 9 +-
EO_Floods/auth.py | 43 ----
EO_Floods/floodmap.py | 231 ++++++++++--------
EO_Floods/leaflet.py | 49 ----
EO_Floods/providers/GFM/__init__.py | 0
EO_Floods/providers/GFM/auth.py | 81 ++++++
EO_Floods/providers/GFM/gfm.py | 150 ++++++++++++
EO_Floods/providers/GFM/leaflet.py | 81 ++++++
EO_Floods/providers/__init__.py | 4 +-
EO_Floods/providers/gfm.py | 31 ---
EO_Floods/providers/hydrafloods/__init__.py | 3 +
EO_Floods/providers/hydrafloods/auth.py | 27 ++
.../{ => providers/hydrafloods}/dataset.py | 65 +++++
.../{ => hydrafloods}/hydrafloods.py | 224 +++++++----------
environment.yml | 16 +-
notebooks/GFM_example.ipynb | 195 +++++++++++++--
notebooks/hydrafloods_example.ipynb | 50 ++--
pyproject.toml | 11 +
tests/conftest.py | 2 +-
tests/test_floodmap.py | 2 +-
tests/test_gfm.py | 87 +++++++
tests/test_hydrafloods.py | 3 +-
22 files changed, 943 insertions(+), 421 deletions(-)
delete mode 100644 EO_Floods/auth.py
delete mode 100644 EO_Floods/leaflet.py
create mode 100644 EO_Floods/providers/GFM/__init__.py
create mode 100644 EO_Floods/providers/GFM/auth.py
create mode 100644 EO_Floods/providers/GFM/gfm.py
create mode 100644 EO_Floods/providers/GFM/leaflet.py
delete mode 100644 EO_Floods/providers/gfm.py
create mode 100644 EO_Floods/providers/hydrafloods/__init__.py
create mode 100644 EO_Floods/providers/hydrafloods/auth.py
rename EO_Floods/{ => providers/hydrafloods}/dataset.py (53%)
rename EO_Floods/providers/{ => hydrafloods}/hydrafloods.py (68%)
create mode 100644 tests/test_gfm.py
diff --git a/EO_Floods/__init__.py b/EO_Floods/__init__.py
index 908cebc..a810196 100644
--- a/EO_Floods/__init__.py
+++ b/EO_Floods/__init__.py
@@ -1,3 +1,10 @@
-""" An easy to use interface for deriving flood maps from earth observation data"""
+"""An easy to use interface for deriving flood maps from earth observation data.""" # noqa: N999
+
+from dotenv import load_dotenv
+
+from EO_Floods.floodmap import FloodMap
+
+load_dotenv()
__version__ = "2023.12"
+__all__ = ["FloodMap"]
diff --git a/EO_Floods/auth.py b/EO_Floods/auth.py
deleted file mode 100644
index 8e1cb58..0000000
--- a/EO_Floods/auth.py
+++ /dev/null
@@ -1,43 +0,0 @@
-import os
-import logging
-import getpass
-import requests
-import json
-
-import ee
-
-log = logging.getLogger(__name__)
-
-
-def ee_initialize(token_name="EARTHENGINE_TOKE"):
- """Authenticates Earth Engine and intializes an Earth Engine session."""
- if ee.data._credentials is None:
- try:
- ee_token = os.environ.get(token_name)
- if ee_token is not None:
- credentials_file_path = os.path.expanduser("~/.config/earthengine/")
- if not os.path.exists(credentials_file_path):
- credential = '{"refresh_token":"%s"}' % ee_token
- os.makedirs(credentials_file_path, exist_ok=True)
- with open(credentials_file_path + "credentials", "w") as file:
- file.write(credential)
- ee.Initialize()
- except Exception:
- ee.Authenticate()
- ee.Initialize()
- ee.Initialize()
-
-
-def GFM_authenticate() -> dict:
- print(
- "To authenticate to the GFM API please enter your email and your password in the following prompts"
- )
- email = input()
- password = getpass.getpass()
- url = "https://api.gfm.eodc.eu/v1/auth/login"
- r = requests.post(url=url, json={"email": email, "password": password})
- if r.status_code == 200:
- print("Successfully authenticated to the GFM API")
- return r.json()
- else:
- raise r.raise_for_status()
diff --git a/EO_Floods/floodmap.py b/EO_Floods/floodmap.py
index 39c4ccb..8b457cb 100644
--- a/EO_Floods/floodmap.py
+++ b/EO_Floods/floodmap.py
@@ -1,29 +1,35 @@
-from typing import List, Optional
+"""General API for flood maps in EO-Floods."""
+
+from __future__ import annotations
+
import logging
import sys
+from typing import TYPE_CHECKING, Any
-import geemap.foliumap as geemap
-
-from EO_Floods.dataset import DATASETS, Dataset
-from EO_Floods.utils import get_dates_in_time_range, dates_within_daterange
-from EO_Floods.providers import HydraFloods
+from EO_Floods.providers import GFM, HydraFloods
+from EO_Floods.providers.hydrafloods.dataset import DATASETS, Dataset
+from EO_Floods.utils import dates_within_daterange, get_dates_in_time_range
+if TYPE_CHECKING:
+ import geemap.foliumap as geemap
+ import ipyleaflet
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
log = logging.getLogger(__name__)
-PROVIDERS = ["hydrafloods", "GFM"]
+PROVIDERS = ["Hydrafloods", "GFM"]
class FloodMap:
+ """General API for flood maps in EO-Floods."""
+
def __init__(
self,
start_date: str,
end_date: str,
- geometry: List[float],
- datasets: Optional[List[str] | str] = None,
- provider: Optional[str] = None,
- **kwargs,
+ provider: str,
+ geometry: list[float],
+ datasets: list[str] | str | None = None,
) -> None:
"""Flood map object for creating and exporting flood maps.
@@ -42,63 +48,72 @@ def __init__(
MODIS, and VIIRS. By default None
provider : providers, optional
The dataset provider, by default none
+
"""
self.start_date = start_date
self.end_date = end_date
self.dates = get_dates_in_time_range(
- start_date_str=start_date, end_date_str=end_date
+ start_date_str=start_date,
+ end_date_str=end_date,
)
self.geometry = geometry
self.datasets = _instantiate_datasets(datasets)
- if provider:
- self.provider_name = provider
-
- log.info(f"Provider set as {provider}")
- self._provider = None
-
+ if provider == "GFM":
+ self._provider = GFM(
+ start_date=start_date,
+ end_date=end_date,
+ geometry=geometry,
+ )
+ elif provider == "Hydrafloods":
+ self._provider = HydraFloods(
+ datasets=self.datasets,
+ start_date=start_date,
+ end_date=end_date,
+ geometry=geometry,
+ )
+ else:
+ err_msg = "Provider not given or recognized, choose from [GFM, Hydrafloods]"
+ raise ValueError(err_msg)
+ self.provider_name = provider
+ log.info("Provider set as %s", provider)
log.info("Flood map object initialized")
@property
- def provider_name(self):
+ def provider_name(self) -> str:
+ """Returns name of the provider."""
return self._provider_name
@provider_name.setter
- def provider_name(self, _provider):
+ def provider_name(self, _provider: str) -> None:
if _provider not in PROVIDERS:
- raise ValueError(
- f"Given provider '{_provider}' not supported, choose from: {' ,'.join(PROVIDERS)}"
- )
+ err_msg = f"Given provider '{_provider}' not supported, choose from: {' ,'.join(PROVIDERS)}"
+ raise ValueError(err_msg)
self._provider_name = _provider
@property
- def provider(self):
+ def provider(self) -> GFM | HydraFloods:
"""Property to fetch the provider object."""
return self._provider
- def available_data(self):
- """Prints information of the chosen datasets for the given temporal and
- spatial resolution. The information contains the dataset name, the number
- of images, the timestamp of the images, and a quality score in percentage.
- """
+ def available_data(self) -> None:
+ """Print information of the selected datasets.
- hf = HydraFloods(
- geometry=self.geometry,
- datasets=self.datasets,
- start_date=self.start_date,
- end_date=self.end_date,
- )
- return hf.available_data()
+ The information contains the dataset name, the number
+ of images, the timestamp of the images, and a quality score in percentage of the selected datasets.
+ """
+ self.provider.available_data()
- def view_data(
+ def preview_data(
self,
- datasets: Optional[List[str] | str] = None,
- dates: Optional[List[str] | str] = None,
+ datasets: list[str] | str | None = None,
+ dates: list[str] | str | None = None,
zoom: int = 8,
- vis_params: dict = {},
- ) -> geemap.Map:
- """View data on a geemap instance. This can be used to visually check if
- the quality of the data is sufficient for further processing to flood maps.
- The data can be filtered based on date and dataset name.
+ **kwargs: dict[str, Any],
+ ) -> geemap.Map | None:
+ """View data on a geemap instance.
+
+ This can be used to visually check if the quality of the data is sufficient for further processing to
+ flood maps. The data can be filtered based on date and dataset name.
Parameters
----------
@@ -109,100 +124,100 @@ def view_data(
A subselection of dates to , by default None
zoom : int, optional
zoom level, by default 8
- vis_params : dict, optional
- A dictionary describing the visual parameters for each dataset, by default {}
+ kwargs: dict,
+ keyword arguments passed to the hydrafloods preview data method.
Returns
-------
geemap.Map
a geemap.Map instance to visualize in a jupyter notebook
+
"""
- if dates:
- dates_within_daterange(
- dates=dates, start_date=self.start_date, end_date=self.end_date
- )
+ if self.provider_name == "Hydrafloods":
+ if dates:
+ dates_within_daterange(
+ dates=dates,
+ start_date=self.start_date,
+ end_date=self.end_date,
+ )
- if not datasets:
- _datasets = self.datasets
- else:
- _datasets = _instantiate_datasets(datasets)
- hf = HydraFloods(
- geometry=self.geometry,
- datasets=_datasets,
- start_date=self.start_date,
- end_date=self.end_date,
- )
- return hf.view_data(zoom, dates, vis_params)
+ if datasets:
+ self._provider = HydraFloods(
+ geometry=self.geometry,
+ datasets=_instantiate_datasets(datasets),
+ start_date=self.start_date,
+ end_date=self.end_date,
+ )
+ return self.provider.view_data(
+ zoom=zoom,
+ dates=dates,
+ **kwargs,
+ )
+ log.warning("GFM does not support previewing data")
+ return None
- def generate_flood_extents(
+ def select_data(
self,
- provider: str = "hydrafloods",
- datasets: Optional[List[str] | str] = None,
- dates: Optional[List[str] | str] = None,
- ):
- """Generates flood extents."""
- if provider == "hydrafloods":
- self.provider_name = "hydrafloods"
- if datasets:
- self.datasets = _instantiate_datasets(datasets=datasets)
- self._provider = HydraFloods(
- datasets=self.datasets,
+ dates: list[str] | str | None = None,
+ datasets: list[str] | None = None,
+ ) -> None:
+ """Select data and datasets from the available datasets based on the timestamp of the data.
+
+ Parameters
+ ----------
+ dates : list[str] | str | None, optional
+ the dates to select, by default None
+ datasets : list[str] | None, optional
+ The datasets to select. Only applicable for the hydrafloods provider, by default None
+
+ """
+ if dates:
+ if isinstance(dates, str):
+ dates = [dates]
+ dates_within_daterange(
+ dates,
start_date=self.start_date,
end_date=self.end_date,
- geometry=self.geometry,
)
- if dates:
- if isinstance(dates, str):
- dates = [dates]
- dates_within_daterange(
- dates, start_date=self.start_date, end_date=self.end_date
- )
- self._provider.generate_flood_extents(dates)
- elif provider == "GFM":
- self.provider_name = "GFM"
- if datasets is not None and datasets != "Sentinel-1":
- log.warning(
- "GFM only provides data based on Sentinel-1, datasets argument is therefore ignored"
- )
- raise NotImplementedError
- else:
- self.provider_name = provider
- def generate_flood_depths(self, **kwargs):
- raise NotImplementedError
+ if self.provider_name == "Hydrafloods":
+ self.provider.select_data(datasets=datasets, dates=dates)
+ if self.provider_name == "GFM":
+ self.provider.select_data(dates=dates)
- def view_flood_extents(self, timeout: int = 300, **kwargs) -> geemap.Map:
- """Plots the generated flood extents on a map together with the data the
- flood extents are generated from.
+    def view_flood_extents(self, timeout: int = 300, **kwargs: dict[str, Any]) -> geemap.Map | ipyleaflet.Map:
+ """Plot the generated flood extents on a map.
+ Parameters
+ ----------
timeout: int, optional
The time in seconds it takes to raise a timeout error
+ kwargs: dict[Any]
+ keyword arguments that are passed to the view_flood_extents HydraFloods method.
Returns
-------
- geemap.Map
+ geemap.Map or ipyleaflet.Map
"""
if self.provider_name == "hydrafloods":
return self.provider.view_flood_extents(timeout=timeout, **kwargs)
if self.provider_name == "GFM":
- raise NotImplementedError
+ return self.provider.view_data()
+ return None
- def export_data(self, **kwargs):
- if not self._provider:
- raise RuntimeError(
- "FloodMap instance has no data to export, generate flood extents first before calling export_data"
- )
+ def export_data(self, **kwargs: dict) -> None:
+ """Export the flood data."""
return self.provider.export_data(**kwargs)
-def _instantiate_datasets(datasets: Optional[List[str] | str]) -> List[Dataset]:
+def _instantiate_datasets(datasets: list[str] | str | None) -> list[Dataset]:
if isinstance(datasets, str):
- if datasets not in DATASETS.keys():
- raise ValueError(f"Dataset '{datasets}' not recognized")
+ if datasets not in DATASETS:
+ err_msg = f"Dataset '{datasets}' not recognized"
+ raise ValueError(err_msg)
return [DATASETS[datasets]]
- elif isinstance(datasets, list):
+ if isinstance(datasets, list):
return [DATASETS[dataset] for dataset in datasets]
- else:
- return [dataset for dataset in DATASETS.values()]
+ return list(DATASETS.values())
diff --git a/EO_Floods/leaflet.py b/EO_Floods/leaflet.py
deleted file mode 100644
index 32c4340..0000000
--- a/EO_Floods/leaflet.py
+++ /dev/null
@@ -1,49 +0,0 @@
-from ipyleaflet import Map, WMSLayer, basemaps
-from traitlets import Unicode
-from ipywidgets import SelectionSlider
-
-from typing import List
-
-from EO_Floods.utils import get_centroid, get_dates_in_time_range
-
-WMS_URL = "https://geoserver.gfm.eodc.eu/geoserver/gfm/wms"
-
-
-class TimeWMSLayer(WMSLayer):
- time = Unicode("").tag(sync=True, o=True)
-
-
-class WMS_MapObject:
- def __init__(
- self,
- start_date: str,
- end_date: str,
- layers: str,
- bbox: List[float],
- wms_url: str = WMS_URL,
- ):
- self.wms = TimeWMSLayer(
- url=wms_url,
- layers=layers,
- time=start_date,
- transparent=True,
- format="image/png",
- )
- self.start_date = start_date
- self.end_date = end_date
- self.bbox = bbox
-
- def map(self):
- centroid = get_centroid(self.bbox)
- m = Map(basemap=basemaps.CartoDB.Positron, center=centroid, zoom=9)
- m.add(self.wms)
- return m
-
- def get_slider(self):
- time_options = get_dates_in_time_range(self.start_date, self.end_date)
- self.slider = SelectionSlider(description="Time:", options=time_options)
- self.slider.observe(self.update_wms, "value")
- return self.slider
-
- def update_wms(self, change):
- self.wms.time = self.slider.value
diff --git a/EO_Floods/providers/GFM/__init__.py b/EO_Floods/providers/GFM/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/EO_Floods/providers/GFM/auth.py b/EO_Floods/providers/GFM/auth.py
new file mode 100644
index 0000000..f78ca03
--- /dev/null
+++ b/EO_Floods/providers/GFM/auth.py
@@ -0,0 +1,81 @@
+"""Authentication module for GFM provider."""
+
+from __future__ import annotations
+
+import getpass
+import logging
+import os
+
+import requests
+from requests import Request
+
+log = logging.getLogger(__name__)
+
+
+def authenticate_gfm(email: str | None = None, pwd: str | None = None, *, from_env: bool = False) -> dict | None:
+ """Authenticate to the GFM server.
+
+ Parameters
+ ----------
+ email : str, optional
+ GFM account email, by default None
+ pwd : str, optional
+ GFM account password, by default None
+ from_env : bool, optional
+        bool option to use environment variables. If set to True this function will look for 'GFM_EMAIL' and 'GFM_PWD'.
+
+ Returns
+ -------
+ dict
+ returns user information
+
+ Raises
+ ------
+    requests.HTTPError
+        if the authentication request fails with an unexpected status code
+
+ """
+ from_input_prompt = False
+ if not email and not pwd and not from_env:
+ log.info(
+ "To authenticate to the GFM API please enter your email and your password in the following prompts",
+ )
+        email = input("Enter your email: ")
+ pwd = getpass.getpass(prompt="Enter your password")
+ from_input_prompt = True
+ elif not email and not pwd and from_env:
+ email, pwd = _get_credentials_from_env()
+ url = "https://api.gfm.eodc.eu/v2/auth/login"
+ r = requests.post(url=url, json={"email": email, "password": pwd}, timeout=120)
+ if r.status_code == 200: # noqa: PLR2004
+ log.info("Successfully authenticated to the GFM API")
+ return r.json()
+ if r.status_code == 400: # noqa: PLR2004
+ log.info("Incorrect email or password, please try again")
+ if from_input_prompt:
+            return authenticate_gfm()
+ else:
+            r.raise_for_status()
+ return None
+
+
+def _get_credentials_from_env() -> tuple[str, str]:
+ email = os.getenv("GFM_EMAIL")
+ pwd = os.getenv("GFM_PWD")
+ if not email or not pwd:
+ err_msg = "Environment variables ['GFM_EMAIL', 'GFM_PWD'] not set."
+ raise ValueError(err_msg)
+ return email, pwd
+
+
+class BearerAuth(requests.auth.AuthBase):
+ """Wrapper class for bearer auth tokens."""
+
+ def __init__(self, token: str) -> None:
+ """Instantiate BearerAuth object."""
+ self.token = token
+
+ def __call__(self, r: Request) -> Request:
+ """__call__ overwrite to add bearer auth token to header."""
+ r.headers["authorization"] = "Bearer " + self.token
+ return r
diff --git a/EO_Floods/providers/GFM/gfm.py b/EO_Floods/providers/GFM/gfm.py
new file mode 100644
index 0000000..a5012a9
--- /dev/null
+++ b/EO_Floods/providers/GFM/gfm.py
@@ -0,0 +1,150 @@
+"""Global Flood Monitor Provider class."""
+
+from __future__ import annotations
+
+import logging
+
+import requests
+
+from EO_Floods.providers import ProviderBase
+from EO_Floods.providers.GFM.auth import BearerAuth, authenticate_gfm
+from EO_Floods.providers.GFM.leaflet import WMSMap
+from EO_Floods.utils import coords_to_geojson
+
+log = logging.getLogger(__name__)
+API_URL = "https://api.gfm.eodc.eu/v2/"
+
+
+class GFM(ProviderBase):
+ """Provider class for retrieving and processing GFM data."""
+
+ def __init__(
+ self,
+ start_date: str,
+ end_date: str,
+ geometry: list[float],
+ *,
+ email: str | None = None,
+ pwd: str | None = None,
+ ) -> None:
+ """Instantiate a GFM provider object.
+
+ Parameters
+ ----------
+ start_date : str
+ start date of the period to retrieve flood data for
+ end_date : str
+ end date of the period to retrieve flood data for
+ geometry : list[float]
+ bounding box in [xmin, ymin, xmax, ymax] format
+        email : str, optional
+            email of the GFM user account, by default None
+        pwd : str, optional
+            password of the GFM user account, by default None
+
+ """
+ self.user: dict = authenticate_gfm(email, pwd)
+ self.aoi_id: str = self._create_aoi(geometry=coords_to_geojson(geometry))
+ self.start_date: str = start_date
+ self.end_date: str = end_date
+ self.geometry: list[float] = geometry
+        self.products: list[dict] = self._get_products()
+
+ def view_data(self, layer: str = "observed_flood_extent") -> WMSMap:
+ """View the data for the given period and geometry.
+
+ Parameters
+ ----------
+ layer : str, optional
+ name of the data layer, by default "observed_flood_extent"
+
+ Returns
+ -------
+ WMS_Map
+ a ipyleaflet map object wrapped in a custom map class.
+
+ """
+ wms_map = WMSMap(
+ start_date=self.start_date,
+ end_date=self.end_date,
+ layers=layer,
+ bbox=self.geometry,
+ )
+ return wms_map.get_map()
+
+ def available_data(self) -> None:
+ """Show the available data for the given time period and geometry."""
+ dates = [product["product_time"] for product in self.products]
+ log.info("For the following dates there is GFM data: %s", dates)
+
+ def select_data(self, dates: list[str]) -> None:
+ """Select data by supplying a list of timestamps.
+
+ Parameters
+ ----------
+ dates : list[str]
+ a list of timestamps that should match at least one of the timestamps given with the available_data method
+
+ """
+ if not isinstance(dates, list):
+ err_msg = f"dates should be a list of dates, not {type(dates)}"
+ raise ValueError(err_msg)
+ products = [product for product in self.products if product["product_time"] in dates]
+ if not products:
+ err_msg = f"No data found for given date(s): {', '.join(dates)}"
+ raise ValueError(err_msg)
+ self.products = products
+
+ def export_data(self) -> None:
+ """Retrieve a download link for downloading the GFM data."""
+ log.info("Retrieving download link")
+
+ for product in self.products:
+ r = requests.get(
+ url=API_URL + f"download/product/{product['product_id']}/{self.user['client_id']}",
+ auth=BearerAuth(self.user["access_token"]),
+ timeout=300,
+ )
+ if r.status_code != 200: # noqa: PLR2004
+ r.raise_for_status()
+ link = r.json()
+ log.info("Image: %s, download link: %s", product["product_time"], link)
+
+ def _create_aoi(self, geometry: list[float]) -> str:
+ log.info("Uploading geometry to GFM server")
+ payload = {
+ "aoi_name": "flood_aoi",
+ "user_id": self.user["client_id"],
+ "description": "area of interest for flood mapping",
+ "geoJSON": geometry,
+ }
+
+ r = requests.post(
+            API_URL + "aoi/create",
+ json=payload,
+ auth=BearerAuth(self.user["access_token"]),
+ timeout=120,
+ )
+
+ if r.status_code != 201: # noqa: PLR2004
+ r.raise_for_status()
+ log.info("Successfully uploaded geometry to GFM server")
+
+ return r.json().get("aoi_id")
+
+    def _get_products(self) -> list[dict]:
+ log.info("Retrieving GFM product information")
+ params = {
+ "time": "range",
+ "from": self.start_date + "T00:00:00",
+ "to": self.end_date + "T23:59:59",
+ }
+ r = requests.get(
+            API_URL + f"aoi/{self.aoi_id}/products",
+ auth=BearerAuth(self.user["access_token"]),
+ params=params,
+ timeout=120,
+ )
+ if r.status_code != 200: # noqa: PLR2004
+ r.raise_for_status()
+ return r.json()["products"]
diff --git a/EO_Floods/providers/GFM/leaflet.py b/EO_Floods/providers/GFM/leaflet.py
new file mode 100644
index 0000000..158961e
--- /dev/null
+++ b/EO_Floods/providers/GFM/leaflet.py
@@ -0,0 +1,81 @@
+"""Mapping class for creating WMS ipyleaflet timeseries maps."""
+
+from __future__ import annotations
+
+from ipyleaflet import Map, WidgetControl, WMSLayer, basemaps
+from ipywidgets import SelectionSlider
+from traitlets import Unicode
+
+from EO_Floods.utils import get_centroid, get_dates_in_time_range
+
+WMS_URL = "https://geoserver.gfm.eodc.eu/geoserver/gfm/wms"
+
+
+class TimeWMSLayer(WMSLayer):
+ """Time series wrapper for WMSLayer."""
+
+ time = Unicode("").tag(sync=True, o=True)
+
+
+class WMSMap:
+ """Class for creating ipyleaflet WMS maps with a time slider."""
+
+ def __init__(
+ self,
+ start_date: str,
+ end_date: str,
+ layers: str | list[str],
+ bbox: list[float],
+ wms_url: str = WMS_URL,
+ ) -> None:
+ """Instantiate a WMSMap object.
+
+ Parameters
+ ----------
+ start_date : str
+ start date of timeseries
+ end_date : str
+ end date of timeseries
+ layers : str or list[str]
+ name of map layers
+ bbox : list[float]
+ bounding box in [xmin, ymin, xmax, ymax] format
+ wms_url : str, optional
+ url of the WMS, by default WMS_URL
+
+ """
+ self.wms = TimeWMSLayer(
+ url=wms_url,
+ layers=layers,
+ time=start_date,
+ transparent=True,
+ format="image/png",
+ )
+ self.start_date = start_date
+ self.end_date = end_date
+ self.bbox = bbox
+
+ def get_map(self) -> Map:
+ """Create a WMS map with a time slider.
+
+ Returns
+ -------
+ Map
+ ipyleaflet map instance
+
+ """
+ centroid = get_centroid(self.bbox)
+ m = Map(basemap=basemaps.OpenStreetMap.Mapnik, center=centroid, zoom=9)
+ m.add(self.wms)
+ self.slider = self._get_slider()
+ self.slider.observe(self._update_wms, "value")
+ slider_cntrl = WidgetControl(widget=self.slider, position="bottomright")
+ m.add(slider_cntrl)
+ return m
+
+ def _get_slider(self) -> SelectionSlider:
+ time_options = get_dates_in_time_range(self.start_date, self.end_date)
+ return SelectionSlider(description="Time:", options=time_options)
+
+    def _update_wms(self, change: dict) -> None:
+ self.wms.time = self.slider.value
diff --git a/EO_Floods/providers/__init__.py b/EO_Floods/providers/__init__.py
index 27c80dd..3b6182a 100644
--- a/EO_Floods/providers/__init__.py
+++ b/EO_Floods/providers/__init__.py
@@ -1,5 +1,5 @@
from EO_Floods.providers.base import ProviderBase, Providers
-from EO_Floods.providers.hydrafloods import HydraFloods
-from EO_Floods.providers.gfm import GFM
+from EO_Floods.providers.hydrafloods.hydrafloods import HydraFloods
+from EO_Floods.providers.GFM.gfm import GFM
__all___ = ["ProviderBase", "HydraFloods", "GFM", "providers"]
diff --git a/EO_Floods/providers/gfm.py b/EO_Floods/providers/gfm.py
deleted file mode 100644
index 49cad76..0000000
--- a/EO_Floods/providers/gfm.py
+++ /dev/null
@@ -1,31 +0,0 @@
-import logging
-from typing import List
-
-from EO_Floods.providers import ProviderBase
-from EO_Floods.auth import GFM_authenticate
-from EO_Floods.utils import coords_to_geojson
-from EO_Floods.leaflet import WMS_MapObject
-
-logger = logging.getLogger(__name__)
-
-
-class GFM(ProviderBase):
- def __init__(
- self,
- start_date: str,
- end_date: str,
- geometry: List[float],
- ):
- self.user: dict = GFM_authenticate()
- self.geojson: dict = coords_to_geojson(geometry)
- self.start_date: str = start_date
- self.end_date: str = end_date
- self.geometry: List[float] = geometry
-
- def preview_data(self, layer: str = "observed_flood_extent") -> WMS_MapObject:
- return WMS_MapObject(
- start_date=self.start_date,
- end_date=self.end_date,
- layers=layer,
- bbox=self.geometry,
- )
diff --git a/EO_Floods/providers/hydrafloods/__init__.py b/EO_Floods/providers/hydrafloods/__init__.py
new file mode 100644
index 0000000..8289cb9
--- /dev/null
+++ b/EO_Floods/providers/hydrafloods/__init__.py
@@ -0,0 +1,3 @@
+from EO_Floods.providers.hydrafloods.hydrafloods import HydraFloods, HydraFloodsDataset
+
+__all__ = ["HydraFloods", "HydraFloodsDataset"]
diff --git a/EO_Floods/providers/hydrafloods/auth.py b/EO_Floods/providers/hydrafloods/auth.py
new file mode 100644
index 0000000..192def3
--- /dev/null
+++ b/EO_Floods/providers/hydrafloods/auth.py
@@ -0,0 +1,27 @@
+"""Function for authenticating to earth engine with environment variables."""
+
+import logging
+import os
+from pathlib import Path
+
+import ee
+
+log = logging.getLogger(__name__)
+
+
+def ee_initialize(token_name: str = "EARTHENGINE_TOKEN") -> None: # noqa: S107
+    """Authenticate Earth Engine and initialize an Earth Engine session."""
+ if ee.data._credentials is None: # noqa: SLF001
+ try:
+ ee_token = os.environ.get(token_name)
+ if ee_token is not None:
+ credentials_file_path = Path("~/.config/earthengine/").expanduser()
+ if not credentials_file_path.exists():
+                credential = f'{{"refresh_token":"{ee_token}"}}'
+ credentials_file_path.mkdir(parents=True)
+ with credentials_file_path.joinpath("credentials").open("w") as file:
+ file.write(credential)
+ ee.Initialize()
+ except Exception: # noqa: BLE001
+ ee.Authenticate()
+ ee.Initialize(project=os.environ.get("EARTH_ENGINE_PROJECT"))
diff --git a/EO_Floods/dataset.py b/EO_Floods/providers/hydrafloods/dataset.py
similarity index 53%
rename from EO_Floods/dataset.py
rename to EO_Floods/providers/hydrafloods/dataset.py
index b23ba1a..5b43349 100644
--- a/EO_Floods/dataset.py
+++ b/EO_Floods/providers/hydrafloods/dataset.py
@@ -1,5 +1,13 @@
+import logging
from pydantic import BaseModel
from enum import Enum
+import hydrafloods as hf
+import ee
+from typing import List
+
+from EO_Floods.utils import calc_quality_score
+
+logger = logging.getLogger(__name__)
class ImageryType(Enum):
@@ -91,3 +99,60 @@ class MODIS(Dataset):
"VIIRS": VIIRS(),
"MODIS": MODIS(),
}
+
+
+class HydraFloodsDataset:
+ def __init__(
+ self,
+ dataset: Dataset,
+ region: ee.geometry.Geometry,
+ start_date: str,
+ end_date: str,
+ **kwargs,
+ ):
+ """Class for initializing Hydrafloods datasets.
+
+ Parameters
+ ----------
+ dataset : Dataset
+ EO_Floods.Dataset object containing information on the dataset and configuration
+ for processing.
+ region : ee.geometry.Geometry
+ Earth Engine geometry that represents the area of interest.
+ start_date : str
+ Start date of the time window of interest (YYY-mm-dd).
+ end_date : str
+ End date of the time window of interest (YYY-mm-dd).
+ """
+ HF_datasets = {
+ "Sentinel-1": hf.Sentinel1,
+ "Sentinel-2": hf.Sentinel2,
+ "Landsat 7": hf.Landsat7,
+ "Landsat 8": hf.Landsat8,
+ "VIIRS": hf.Viirs,
+ "MODIS": hf.Modis,
+ }
+ self.name: str = dataset.name
+ self.short_name: str = dataset.short_name
+ self.imagery_type: ImageryType = dataset.imagery_type
+ self.default_flood_extent_algorithm: str = (
+ dataset.default_flood_extent_algorithm
+ )
+ self.region = region
+ self.qa_band = dataset.qa_band
+ self.algorithm_params: dict = dataset.algorithm_params
+ self.visual_params: dict = dataset.visual_params
+ self.providers = dataset.providers
+ self.obj: hf.Dataset = HF_datasets[dataset.name](
+ region=region, start_time=start_date, end_time=end_date, **kwargs
+ )
+ logger.debug(f"Initialized hydrafloods dataset for {self.name}")
+
+ def _calc_quality_score(self) -> List[float]:
+ if (
+ self.name in ["VIIRS", "MODIS"]
+ ): # these datasets consist of global images, need to be clipped first before reducing
+ self.obj.apply_func(func=lambda x: x.clip(self.region), inplace=True)
+ self.obj.apply_func(func=calc_quality_score, inplace=True, band=self.qa_band)
+ qa_score = self.obj.collection.aggregate_array("qa_score").getInfo()
+ return [round(score, 2) for score in qa_score]
diff --git a/EO_Floods/providers/hydrafloods.py b/EO_Floods/providers/hydrafloods/hydrafloods.py
similarity index 68%
rename from EO_Floods/providers/hydrafloods.py
rename to EO_Floods/providers/hydrafloods/hydrafloods.py
index 31e54f5..9b74957 100644
--- a/EO_Floods/providers/hydrafloods.py
+++ b/EO_Floods/providers/hydrafloods/hydrafloods.py
@@ -1,24 +1,26 @@
-from typing import List, Optional
-import warnings
-import datetime
import logging
+import multiprocessing.pool
import re
+import warnings
+from typing import List, Optional
+import ee
+import geemap.foliumap as geemap
import hydrafloods as hf
from hydrafloods.geeutils import batch_export
-import geemap.foliumap as geemap
-import ee
-import multiprocessing.pool
from tabulate import tabulate
-from EO_Floods.dataset import Dataset, ImageryType, DATASETS
+from EO_Floods.providers import ProviderBase
+from EO_Floods.providers.hydrafloods.dataset import (
+ Dataset,
+ HydraFloodsDataset,
+ ImageryType,
+)
from EO_Floods.utils import (
coords_to_ee_geom,
- get_centroid,
date_parser,
- calc_quality_score,
+ get_centroid,
)
-from EO_Floods.providers import ProviderBase
logger = logging.getLogger(__name__)
@@ -132,7 +134,23 @@ def view_data(
)
return Map
- def generate_flood_extents(
+ def select_data(
+ self, datasets: Optional[List[str]] = None, dates: Optional[List[str]] = None
+ ):
+ if datasets:
+ self.datasets = [
+ dataset for dataset in self.datasets if dataset.name in datasets
+ ]
+ if dates:
+ for dataset in self.datasets:
+ # Filter the dataset on dates
+ if len(dates) > 1:
+ filter = _multiple_dates_filter(dates)
+ else:
+ filter = _date_filter(dates[0])
+ dataset.obj.filter(filter, inplace=True)
+
+ def _generate_flood_extents(
self, dates: Optional[List[str]] = None, clip_ocean: bool = True
) -> None:
"""Generates flood extents on the selected datasets and for the given temporal
@@ -206,7 +224,9 @@ def generate_flood_extents(
def generate_flood_depths(self):
pass
- def view_flood_extents(self, timeout: int = 60, zoom: int = 8) -> geemap.Map:
+ def view_flood_extents(
+ self, dates, timeout: int = 60, zoom: int = 8, clip_ocean: bool = True
+ ) -> geemap.Map:
"""View the flood extents on a geemap.Map object.
Parameters
@@ -223,43 +243,11 @@ def view_flood_extents(self, timeout: int = 60, zoom: int = 8) -> geemap.Map:
"""
if not hasattr(self, "flood_extents"):
- raise RuntimeError(
- "generate_flood_extents() needs to be called before calling this method"
- )
-
- def _plot_flood_extents(zoom: int):
- flood_extent_vis_params = {
- "bands": ["water"],
- "min": 0,
- "max": 1,
- "palette": ["#C0C0C0", "#000080"],
- }
- map = self.view_data()
- for ds_name in self.flood_extents:
- img_col = self.flood_extents[ds_name].collection
- n_images = img_col.size().getInfo()
- for n in range(n_images):
- img = ee.Image(img_col.toList(n_images).get(n))
- # date = datetime.datetime.fromtimestamp(
- # img.get("system:time_start").getInfo()
- # ).strftime("%Y-%m-%d HH:MM:SS")
- map.addLayer(
- img,
- vis_params=flood_extent_vis_params,
- name=f"{ds_name} flood extent",
- )
-
- max_extent_img = self.flood_extents[ds_name].collection.max()
- map.add_layer(
- max_extent_img,
- vis_params=flood_extent_vis_params,
- name=f"{ds_name} max flood extent",
- )
- return map
+ self._generate_flood_extents(dates=dates, clip_ocean=clip_ocean)
try:
with multiprocessing.pool.ThreadPool() as pool:
- return_value = pool.apply_async(_plot_flood_extents, (zoom,)).get(
+ return_value = pool.apply_async(self._plot_flood_extents, (zoom,)).get(
timeout=timeout
)
except multiprocessing.TimeoutError:
@@ -275,9 +263,11 @@ def export_data(
self,
export_type: str = "toDrive",
include_base_data: bool = False,
- folder: str = None,
+ folder: Optional[str] = None,
ee_asset_path: str = "",
- scale: int | float = None,
+ clip_ocean: bool = True,
+ dates: Optional[List[str]] = None,
+ scale: Optional[int | float] = None,
**kwargs,
):
"""Exports the data generated in the floodmapping workflow to a Google Drive
@@ -301,33 +291,31 @@ def export_data(
if export_type == "toDrive":
folder = "EO_Floods"
- if hasattr(self, "flood_extents"):
- for ds in self.flood_extents.keys():
- logger.info(
- f"Exporting {ds} flood extents {export_type[:2]+' '+export_type[2:]}"
- )
+ if not hasattr(self, "flood_extents"):
+ self._generate_flood_extents(dates, clip_ocean=clip_ocean)
+ for ds in self.flood_extents.keys():
+ logger.info(
+ f"Exporting {ds} flood extents {export_type[:2]+' '+export_type[2:]}"
+ )
- if not scale:
- scale = (
- self.flood_extents[ds]
- .collection.first()
- .select("water")
- .projection()
- .nominalScale(),
- )
- batch_export(
- collection=self.flood_extents[ds].collection,
- collection_asset=ee_asset_path,
- export_type=export_type,
- folder=folder,
- suffix=f"{ds.replace(' ', '_')}_flood_extent",
- scale=scale,
- **kwargs,
+ if not scale:
+ scale = (
+ self.flood_extents[ds]
+ .collection.first()
+ .select("water")
+ .projection()
+                    .nominalScale()
)
- else:
- raise RuntimeError(
- "First call generate_flood_extents() before calling export_data()"
+ batch_export(
+ collection=self.flood_extents[ds].collection,
+ collection_asset=ee_asset_path,
+ export_type=export_type,
+ folder=folder,
+ suffix=f"{ds.replace(' ', '_')}_flood_extent",
+ scale=scale,
+ **kwargs,
)
+
if include_base_data:
for dataset in self.datasets:
logger.info(
@@ -348,6 +336,36 @@ def export_data(
**kwargs,
)
+ def _plot_flood_extents(self, zoom: int):
+ flood_extent_vis_params = {
+ "bands": ["water"],
+ "min": 0,
+ "max": 1,
+ "palette": ["#C0C0C0", "#000080"],
+ }
+ map = self.view_data()
+ for ds_name in self.flood_extents:
+ img_col = self.flood_extents[ds_name].collection
+ n_images = img_col.size().getInfo()
+ for n in range(n_images):
+ img = ee.Image(img_col.toList(n_images).get(n))
+ # date = datetime.datetime.fromtimestamp(
+ # img.get("system:time_start").getInfo()
+ # ).strftime("%Y-%m-%d HH:MM:SS")
+ map.addLayer(
+ img,
+ vis_params=flood_extent_vis_params,
+ name=f"{ds_name} flood extent",
+ )
+
+ max_extent_img = self.flood_extents[ds_name].collection.max()
+ map.add_layer(
+ max_extent_img,
+ vis_params=flood_extent_vis_params,
+ name=f"{ds_name} max flood extent",
+ )
+ return map
+
def _date_filter(date: str) -> ee.Filter:
d = ee.Date(date_parser(date))
@@ -366,65 +384,3 @@ def _multiple_dates_filter(dates: list[str]):
def _filter_collection_by_dates(date: str, dataset):
img = dataset.obj.collection.filter(_date_filter(date))
return img
-
-
-class HydraFloodsDataset:
- def __init__(
- self,
- dataset: Dataset,
- region: ee.geometry.Geometry,
- start_date: str,
- end_date: str,
- **kwargs,
- ):
- """Class for initializing Hydrafloods datasets.
-
- Parameters
- ----------
- dataset : Dataset
- EO_Floods.Dataset object containing information on the dataset and configuration
- for processing.
- region : ee.geometry.Geometry
- Earth Engine geometry that represents the area of interest.
- start_date : str
- Start date of the time window of interest (YYY-mm-dd).
- end_date : str
- End date of the time window of interest (YYY-mm-dd).
- """
- HF_datasets = {
- "Sentinel-1": hf.Sentinel1,
- "Sentinel-2": hf.Sentinel2,
- "Landsat 7": hf.Landsat7,
- "Landsat 8": hf.Landsat8,
- "VIIRS": hf.Viirs,
- "MODIS": hf.Modis,
- }
- self.name: str = dataset.name
- self.short_name: str = dataset.short_name
- self.imagery_type: ImageryType = dataset.imagery_type
- self.default_flood_extent_algorithm: str = (
- dataset.default_flood_extent_algorithm
- )
- self.region = region
- self.qa_band = dataset.qa_band
- self.algorithm_params: dict = dataset.algorithm_params
- self.visual_params: dict = dataset.visual_params
- self.providers = dataset.providers
- self.obj: hf.Dataset = HF_datasets[dataset.name](
- region=region, start_time=start_date, end_time=end_date, **kwargs
- )
- logger.debug(f"Initialized hydrafloods dataset for {self.name}")
-
- # col_size = self.obj.n_images
- # self.obj.collection = _mosaic_same_date_images(
- # self.obj.collection, size=col_size
- # )
-
- def _calc_quality_score(self) -> List[float]:
- if (
- self.name in ["VIIRS", "MODIS"]
- ): # these datasets consist of global images, need to be clipped first before reducing
- self.obj.apply_func(func=lambda x: x.clip(self.region), inplace=True)
- self.obj.apply_func(func=calc_quality_score, inplace=True, band=self.qa_band)
- qa_score = self.obj.collection.aggregate_array("qa_score").getInfo()
- return [round(score, 2) for score in qa_score]
diff --git a/environment.yml b/environment.yml
index 7993e08..b635ef4 100644
--- a/environment.yml
+++ b/environment.yml
@@ -9,12 +9,10 @@ dependencies:
- numpy=1.26.0
- pandas=2.2.1
- pip:
- - hydrafloods
- - geemap
- - pytest
- - pydantic_settings
- - mock
- - tabulate
-
-
-
+ - hydrafloods
+ - geemap
+ - pytest
+ - pytest-mock
+ - pydantic_settings
+ - mock
+ - tabulate
diff --git a/notebooks/GFM_example.ipynb b/notebooks/GFM_example.ipynb
index 70414d7..274c756 100644
--- a/notebooks/GFM_example.ipynb
+++ b/notebooks/GFM_example.ipynb
@@ -6,18 +6,64 @@
"metadata": {},
"outputs": [],
"source": [
- "from EO_Floods.utils import get_dates_in_time_range"
+ "from EO_Floods import FloodMap"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ " \n",
+ " "
+ ],
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "To authenticate to the GFM API please enter your email and your password in the following prompts\n",
+ "Successfully authenticated to the GFM API\n",
+ "INFO:EO_Floods.providers.GFM.gfm:Uploading geometry to GFM server\n",
+ "INFO:EO_Floods.providers.GFM.gfm:Successfully uploaded geometry to GFM server\n",
+ "INFO:EO_Floods.providers.GFM.gfm:Retrieving GFM product information\n",
+ "INFO:EO_Floods.floodmap:Provider set as GFM\n",
+ "INFO:EO_Floods.floodmap:Flood map object initialized\n"
+ ]
+ }
+ ],
"source": [
- "from EO_Floods.leaflet import WMS_MapObject\n",
- "\n",
- "wms_map = WMS_MapObject(wms_url=\"https://geoserver.gfm.eodc.eu/geoserver/gfm/wms\", layers=\"observed_water_extent\", start_date=\"2022-01-01\", end_date=\"2022-04-30\", bbox=[4.294793,51.974955,4.412896,52.042158])"
+ "floodmap = FloodMap(start_date=\"2022-10-01\", end_date=\"2022-10-15\", geometry=[67.740187,27.712453,68.104933,28.000935], provider=\"GFM\")"
]
},
{
@@ -27,22 +73,48 @@
"outputs": [
{
"data": {
- "application/vnd.jupyter.widget-view+json": {
- "model_id": "123a2e55b15e4e3d856428d6b940898b",
- "version_major": 2,
- "version_minor": 0
- },
+ "text/html": [
+ "\n",
+ " \n",
+ " "
+ ],
"text/plain": [
- "Map(center=[52.0085565, 4.3538445], controls=(ZoomControl(options=['position', 'zoom_in_text', 'zoom_in_title'…"
+ ""
]
},
- "execution_count": 4,
"metadata": {},
- "output_type": "execute_result"
+ "output_type": "display_data"
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "INFO:EO_Floods.providers.GFM.gfm:For the following dates there is GFM data: ['2022-10-02T13:36:35', '2022-10-05T01:25:26', '2022-10-05T01:25:51', '2022-10-09T13:28:25', '2022-10-14T13:36:35']\n"
+ ]
}
],
"source": [
- "wms_map.map()"
+ "floodmap.available_data()"
]
},
{
@@ -50,15 +122,49 @@
"execution_count": 5,
"metadata": {},
"outputs": [
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ " \n",
+ " "
+ ],
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
{
"data": {
"application/vnd.jupyter.widget-view+json": {
- "model_id": "2a3979739b66443cae28fb7f098c1188",
+ "model_id": "16bf2bdc3627481d8b6cb8bac493db4c",
"version_major": 2,
"version_minor": 0
},
"text/plain": [
- "SelectionSlider(description='Time:', options=('2022-01-01', '2022-01-02', '2022-01-03', '2022-01-04', '2022-01…"
+ "Map(center=[27.856693999999997, 67.92256], controls=(ZoomControl(options=['position', 'zoom_in_text', 'zoom_in…"
]
},
"execution_count": 5,
@@ -67,7 +173,60 @@
}
],
"source": [
- "wms_map.get_slider()"
+ "floodmap.provider.view_data()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ " \n",
+ " "
+ ],
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "floodmap.select_data(dates=\"2022-10-02T13:36:35\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "floodmap.export_data()"
]
},
{
@@ -94,7 +253,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.11.6"
+ "version": "3.11.9"
}
},
"nbformat": 4,
diff --git a/notebooks/hydrafloods_example.ipynb b/notebooks/hydrafloods_example.ipynb
index 23cea17..cfb28eb 100644
--- a/notebooks/hydrafloods_example.ipynb
+++ b/notebooks/hydrafloods_example.ipynb
@@ -7,8 +7,8 @@
"outputs": [],
"source": [
"import ee\n",
- "ee.Initialize()\n",
- "from EO_Floods.floodmap import FloodMap"
+ "from EO_Floods.floodmap import FloodMap\n",
+ "ee.Initialize()"
]
},
{
@@ -75,8 +75,29 @@
"name": "stdout",
"output_type": "stream",
"text": [
+ "INFO:EO_Floods.floodmap:Provider set as Hydrafloods\n",
"INFO:EO_Floods.floodmap:Flood map object initialized\n"
]
+ },
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "/home/tjalling/miniconda3/envs/EO-Floods/lib/python3.11/site-packages/ee/deprecation.py:204: DeprecationWarning: \n",
+ "\n",
+ "Attention required for NOAA/VIIRS/001/VNP09GA! You are using a deprecated asset.\n",
+ "To ensure continued functionality, please update it.\n",
+ "Learn more: https://developers.google.com/earth-engine/datasets/catalog/NOAA_VIIRS_001_VNP09GA\n",
+ "\n",
+ " warnings.warn(warning, category=DeprecationWarning)\n",
+ "/home/tjalling/miniconda3/envs/EO-Floods/lib/python3.11/site-packages/ee/deprecation.py:204: DeprecationWarning: \n",
+ "\n",
+ "Attention required for MODIS/006/MOD09GA! You are using a deprecated asset.\n",
+ "To ensure continued functionality, please update it.\n",
+ "Learn more: https://developers.google.com/earth-engine/datasets/catalog/MODIS_006_MOD09GA\n",
+ "\n",
+ " warnings.warn(warning, category=DeprecationWarning)\n"
+ ]
}
],
"source": [
@@ -84,6 +105,7 @@
"floodmap = FloodMap(\n",
" start_date=\"2022-10-01\",\n",
" end_date=\"2022-10-15\",\n",
+ " provider=\"Hydrafloods\",\n",
" geometry=[67.740187,27.712453,68.104933,28.000935],\n",
")\n"
]
@@ -134,30 +156,14 @@
"metadata": {},
"output_type": "display_data"
},
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "/home/tjalling/miniconda3/envs/EO-Floods/lib/python3.11/site-packages/ee/deprecation.py:204: DeprecationWarning: \n",
- "\n",
- "Attention required for NOAA/VIIRS/001/VNP09GA! You are using a deprecated asset.\n",
- "To ensure continued functionality, please update it.\n",
- "Learn more: https://developers.google.com/earth-engine/datasets/catalog/NOAA_VIIRS_001_VNP09GA\n",
- "\n",
- " warnings.warn(warning, category=DeprecationWarning)\n",
- "/home/tjalling/miniconda3/envs/EO-Floods/lib/python3.11/site-packages/ee/deprecation.py:204: DeprecationWarning: \n",
- "\n",
- "Attention required for MODIS/006/MOD09GA! You are using a deprecated asset.\n",
- "To ensure continued functionality, please update it.\n",
- "Learn more: https://developers.google.com/earth-engine/datasets/catalog/MODIS_006_MOD09GA\n",
- "\n",
- " warnings.warn(warning, category=DeprecationWarning)\n"
- ]
- },
{
"name": "stdout",
"output_type": "stream",
"text": [
+ "WARNING:googleapiclient.http:Sleeping 1.89 seconds before retry 1 of 5 for request: POST https://earthengine.googleapis.com/v1/projects/ee-tjallingdejong-em/value:compute?prettyPrint=false&alt=json, after 429\n",
+ "WARNING:googleapiclient.http:Sleeping 3.45 seconds before retry 2 of 5 for request: POST https://earthengine.googleapis.com/v1/projects/ee-tjallingdejong-em/value:compute?prettyPrint=false&alt=json, after 429\n",
+ "WARNING:googleapiclient.http:Sleeping 0.96 seconds before retry 1 of 5 for request: POST https://earthengine.googleapis.com/v1/projects/ee-tjallingdejong-em/value:compute?prettyPrint=false&alt=json, after 429\n",
+ "WARNING:googleapiclient.http:Sleeping 1.83 seconds before retry 2 of 5 for request: POST https://earthengine.googleapis.com/v1/projects/ee-tjallingdejong-em/value:compute?prettyPrint=false&alt=json, after 429\n",
"======================================================================\n",
"Dataset name: Sentinel-1\n",
"Number of images: 5\n",
diff --git a/pyproject.toml b/pyproject.toml
index 5a42ef9..8f3828e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -22,3 +22,14 @@ dynamic = ["version", "description"]
[project.urls]
Home = "https://github.com/Deltares-research/EO-floods"
+
+
+[tool.ruff.lint]
+select = ["ALL"]
+
+[tool.ruff]
+line-length = 120
+exclude = ["tests/*.py"]
+
+[tool.pytest.ini_options]
+testpaths = ["tests"]
\ No newline at end of file
diff --git a/tests/conftest.py b/tests/conftest.py
index 4e20324..9edc4e1 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,5 +1,5 @@
import pytest
-from EO_Floods.auth import ee_initialize
+from EO_Floods.providers.hydrafloods.auth import ee_initialize
from EO_Floods.floodmap import FloodMap
ee_initialize()
diff --git a/tests/test_floodmap.py b/tests/test_floodmap.py
index 99e60d7..fc54982 100644
--- a/tests/test_floodmap.py
+++ b/tests/test_floodmap.py
@@ -5,7 +5,7 @@
from unittest.mock import patch
from EO_Floods.floodmap import FloodMap
-from EO_Floods.dataset import Dataset
+from EO_Floods.providers.hydrafloods.dataset import Dataset
from EO_Floods.providers.hydrafloods import HydraFloods
diff --git a/tests/test_gfm.py b/tests/test_gfm.py
new file mode 100644
index 0000000..f3f066e
--- /dev/null
+++ b/tests/test_gfm.py
@@ -0,0 +1,87 @@
+from dotenv import load_dotenv
+import os
+import logging
+import re
+
+from ipyleaflet import Map
+import pytest
+from EO_Floods.providers import GFM
+from EO_Floods.providers.GFM.auth import authenticate_gfm, _get_credentials_from_env
+
+load_dotenv()
+
+
+@pytest.mark.integration
+def test_GFM(caplog, mocker):
+ caplog.set_level(logging.INFO)
+
+ gfm = GFM(
+ start_date="2022-10-01",
+ end_date="2022-10-15",
+ geometry=[67.740187, 27.712453, 68.104933, 28.000935],
+ email=os.getenv("GFM_EMAIL"),
+ pwd=os.getenv("GFM_PWD"),
+ )
+ # Test init
+ assert "Successfully authenticated to the GFM API\n" in caplog.text
+ assert "Successfully uploaded geometry to GFM server" in caplog.text
+ assert "Retrieving GFM product information" in caplog.text
+ assert len(gfm.products) == 5
+ assert gfm.user["client_id"] == "iiq9MAfBmxgYynhpxFwi78J5"
+
+ # Test available_data
+ dates = [product["product_time"] for product in gfm.products]
+ gfm.available_data()
+ assert f"For the following dates there is GFM data: {dates}" in caplog.text
+
+ # Test select_data
+ gfm.select_data(dates=dates[:2])
+ assert len(gfm.products) == 2
+
+ # Test view_data
+ wms_map = gfm.view_data()
+ assert isinstance(wms_map, Map)
+
+ # Test export_data
+ mock_get_request = mocker.patch("requests.get")
+ mock_request_response = mocker.Mock(status_code=200, json=lambda: {"link": "mock_link"})
+ mock_get_request.return_value = mock_request_response
+ gfm.export_data()
+ for product in gfm.products:
+ assert f"Image: {product['product_time']}, download link: {{'link': 'mock_link'}}" in caplog.text
+
+ with pytest.raises(ValueError, match="dates should be a list of dates, not "):
+ gfm.select_data(dates="03-04-1995")
+
+ with pytest.raises(
+ ValueError, match="No data found for given date\(s\): 03-04-1995"
+ ):
+ gfm.select_data(dates=["03-04-1995"])
+
+
+
+
+def test_GFM_auth(mocker, caplog):
+ mock_post_request = mocker.patch("requests.post")
+ mock_response = mocker.Mock(status_code=200, json=lambda: {"test": "data"})
+ mock_post_request.return_value = mock_response
+ response = authenticate_gfm(email=os.getenv("GFM_EMAIL"), pwd=os.getenv("GFM_PWD"))
+ assert "Successfully authenticated to the GFM API\n" in caplog.text
+ assert isinstance(response, dict)
+
+ response = authenticate_gfm(from_env=True)
+ assert "Successfully authenticated to the GFM API\n" in caplog.text
+ assert isinstance(response, dict)
+
+ mock_response.status_code = 400
+ authenticate_gfm(email=os.getenv("GFM_EMAIL"), pwd=os.getenv("GFM_PWD"))
+ assert "Incorrect email or password, please try again\n" in caplog.text
+
+
+def test_GFM_auth_get_credentials_from_env(mocker):
+    email, pwd = _get_credentials_from_env()
+ assert email == os.getenv("GFM_EMAIL")
+ mock_getenv = mocker.patch("os.getenv")
+ mock_getenv.return_value = None
+ with pytest.raises(ValueError, match=re.escape("Environment variables ['GFM_EMAIL', 'GFM_PWD'] not set.")):
+ _get_credentials_from_env()
\ No newline at end of file
diff --git a/tests/test_hydrafloods.py b/tests/test_hydrafloods.py
index 21c5e8c..8666d80 100644
--- a/tests/test_hydrafloods.py
+++ b/tests/test_hydrafloods.py
@@ -4,8 +4,7 @@
import geemap
import pytest
from EO_Floods.providers.hydrafloods import HydraFloodsDataset, HydraFloods
-from EO_Floods.utils import date_parser
-from EO_Floods.dataset import DATASETS
+from EO_Floods.providers.hydrafloods.dataset import DATASETS
def hydrafloods_instance(dataset_list: list) -> HydraFloods: