Merge pull request #189 from NOAA-OWP/deprecated_function_removal
Deprecated function removal
GregoryPetrochenkov-NOAA authored Oct 9, 2024
2 parents 1d0a21b + f10eeed commit e0d88d3
Showing 10 changed files with 33 additions and 11 deletions.
16 changes: 16 additions & 0 deletions LICENSE.MD
@@ -1,7 +1,23 @@
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+ -----------------
+
+ “Software code created by U.S. Government employees is not subject to copyright
+ in the United States (17 U.S.C. §105). The United States/Department of Commerce
+ reserve all rights to seek and obtain copyright protection in countries other
+ than the United States for Software authored in its entirety by the Department
+ of Commerce. To this end, the Department of Commerce hereby grants to Recipient
+ a royalty-free, nonexclusive license to use, copy, and create derivative works
+ of the Software outside of the United States.”
+
+ -----------------
2 changes: 1 addition & 1 deletion docs/sphinx/SphinxCatalogTutorial.ipynb
@@ -884,7 +884,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.10"
"version": "3.10.14"
}
},
"nbformat": 4,
2 changes: 1 addition & 1 deletion docs/sphinx/SphinxContinuousTutorial.ipynb
@@ -710,7 +710,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.10"
"version": "3.10.14"
}
},
"nbformat": 4,
4 changes: 2 additions & 2 deletions docs/sphinx/SphinxTutorial.ipynb
@@ -40,7 +40,7 @@
"id": "64da5e7b",
"metadata": {},
"source": [
"It is preferred to use masking and scaling by default. If your original data does not have nodata or does not have nodata assigned, please assign using: `rio.set_nodata(<your_nodata_value>)`"
"It is preferred to use masking and scaling by default. If your original data does not have nodata or does not have nodata assigned, please assign using: `rio.write_nodata(<your_nodata_value>, inplace=True)`"
]
},
{
@@ -1158,7 +1158,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.10"
"version": "3.10.14"
}
},
"nbformat": 4,
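For readers following the tutorial change above: `rio.set_nodata` has been removed from recent rioxarray releases, and `rio.write_nodata` is the supported replacement. A minimal sketch of the recommended assignment, assuming an illustrative file path and nodata value (neither comes from this repository):

```python
# Minimal sketch: attach a nodata value with rioxarray's write_nodata.
# "candidate.tif" and -9999 are illustrative placeholders.
import rioxarray  # registers the .rio accessor on xarray objects

candidate = rioxarray.open_rasterio("candidate.tif", mask_and_scale=True)

# If the raster carries no nodata metadata, record one in place so that
# downstream comparisons know which cells to ignore.
if candidate.rio.nodata is None and candidate.rio.encoded_nodata is None:
    candidate.rio.write_nodata(-9999, inplace=True)
```

With `mask_and_scale=True`, rioxarray masks nodata cells to NaN on read, which is why the tutorials recommend masking and scaling by default.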
2 changes: 1 addition & 1 deletion notebooks/Tutorial.ipynb
@@ -40,7 +40,7 @@
"id": "64da5e7b",
"metadata": {},
"source": [
"It is preferred to use masking and scaling by default. If your original data does not have nodata or does not have nodata assigned, please assign using: `rio.set_nodata(<your_nodata_value>)`"
"It is preferred to use masking and scaling by default. If your original data does not have nodata or does not have nodata assigned, please assign using: `rio.write_nodata(<your_nodata_value>, inplace=True)`"
]
},
{
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -12,7 +12,7 @@ authors = [
requires-python = ">=3.8"
keywords = ["geospatial", "evaluations"]
license = {text = "MIT"}
version = "0.2.7-1"
version = "0.2.9"
dynamic = ["readme", "dependencies"]

[project.optional-dependencies]
2 changes: 1 addition & 1 deletion src/gval/comparison/agreement.py
@@ -146,7 +146,7 @@ def _manage_information_loss(agreement_map, crs, nodata, encode_nodata, dtype):
nodata, encoded=encode_nodata, inplace=True
)
else:
- agreement_map.rio.set_nodata(nodata, inplace=True)
+ agreement_map.rio.write_nodata(nodata, inplace=True)

return agreement_map

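The hunk above is the core of this PR: `set_nodata` was removed from rioxarray, so `_manage_information_loss` now calls `write_nodata` in both branches. A hedged sketch of the same branching on a toy array (variable names and values are illustrative, not gval's API):

```python
# Sketch of the encoded vs. plain nodata branch mirrored from the hunk above.
import numpy as np
import xarray as xr
import rioxarray  # noqa: F401  (activates the .rio accessor)

agreement_map = xr.DataArray(
    np.array([[0.0, np.nan], [1.0, 2.0]]),
    dims=("y", "x"),
)
nodata, encode_nodata = -9999, True

if encode_nodata:
    # Keep NaN in memory; record -9999 only as the encoded (on-disk) fill value.
    agreement_map.rio.write_nodata(nodata, encoded=encode_nodata, inplace=True)
else:
    # Record -9999 directly as the in-memory nodata value.
    agreement_map.rio.write_nodata(nodata, inplace=True)
```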
6 changes: 4 additions & 2 deletions src/gval/homogenize/rasterize.py
@@ -60,7 +60,7 @@ def _rasterize_data(
if isinstance(candidate_map, xr.DataArray):
rasterized_data = rasterized_data.to_array()
# Nodata resolve
- rasterized_data.rio.set_nodata(np.nan)
+ rasterized_data.rio.write_nodata(np.nan, encoded=True, inplace=True)
rasterized_data.rio.write_nodata(
candidate_map.rio.encoded_nodata, encoded=True, inplace=True
)
@@ -94,7 +94,9 @@
for var_name in rasterized_data.data_vars.keys():
# Resolve nodata issues

- rasterized_data[var_name] = rasterized_data[var_name].rio.set_nodata(np.nan)
+ rasterized_data[var_name] = rasterized_data[var_name].rio.write_nodata(
+ np.nan, encoded=True, inplace=True
+ )

if rasterized_data[var_name].rio.encoded_nodata is None:
rasterized_data[var_name] = rasterized_data[var_name].rio.write_nodata(
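In the rasterize hunks above, the removed `set_nodata(np.nan)` calls are likewise replaced with `write_nodata`, and the surrounding code copies the candidate map's encoded nodata onto the freshly rasterized data. A hedged sketch of one way to express that fallback, using only the rioxarray calls that appear in the hunk (the helper name and control flow are illustrative, not gval's):

```python
# Sketch: propagate nodata from a candidate raster to newly rasterized data,
# falling back to NaN when the candidate has no encoded nodata.
import numpy as np
import xarray as xr
import rioxarray  # noqa: F401

def resolve_nodata(rasterized: xr.DataArray, candidate: xr.DataArray) -> xr.DataArray:
    source_nodata = candidate.rio.encoded_nodata
    if source_nodata is None:
        # No encoded nodata on the candidate: treat NaN as the missing value.
        return rasterized.rio.write_nodata(np.nan, inplace=True)
    # Otherwise mirror the candidate's encoded (on-disk) nodata value.
    return rasterized.rio.write_nodata(source_nodata, encoded=True, inplace=True)
```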
2 changes: 1 addition & 1 deletion tests/data_generation/data_tests.py
@@ -87,7 +87,7 @@ def generate_aligned_and_agreement_maps(

agreement_map_computed.rio.write_crs(cam.rio.crs, inplace=True)

- agreement_map_computed.rio.set_nodata(-9999)
+ agreement_map_computed.rio.write_nodata(-9999, inplace=True)
if np.nan in agreement_map_computed:
agreement_map_computed = xr.where(
np.isnan(agreement_map_computed), -9999, agreement_map_computed
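The test-data change above swaps `set_nodata(-9999)` for `write_nodata(-9999, inplace=True)` before NaN cells are replaced with the -9999 sentinel. A hedged sketch of that preparation step with illustrative names (not the actual test helper):

```python
# Sketch: stamp CRS and nodata onto a computed agreement map, then replace
# NaN with the -9999 sentinel so exact comparisons are deterministic.
import numpy as np
import xarray as xr
import rioxarray  # noqa: F401

def finalize_agreement_map(agreement: xr.DataArray, reference: xr.DataArray) -> xr.DataArray:
    agreement.rio.write_crs(reference.rio.crs, inplace=True)
    agreement.rio.write_nodata(-9999, inplace=True)
    # Swap NaN cells for the sentinel so equality checks don't trip on NaN != NaN.
    return xr.where(np.isnan(agreement), -9999, agreement)
```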
6 changes: 5 additions & 1 deletion tests/test_catalogs.py
@@ -280,7 +280,11 @@ def test_stac_catalog_comparison_success(
stac_clog = catalog_compare(**arguments)

pd.testing.assert_frame_equal(
- stac_clog, expected_catalog_df, check_dtype=False, check_index_type=False
+ stac_clog,
+ expected_catalog_df,
+ check_dtype=False,
+ check_index_type=False,
+ check_like=True,
), "Computed catalog did not match the expected catalog df"


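The only functional change in this test is the added `check_like=True`, which makes `pandas.testing.assert_frame_equal` tolerant of row and column ordering. A small hedged sketch with made-up frames showing what the flag buys:

```python
# check_like=True reindexes the right frame like the left before comparing,
# so differing row/column order alone no longer fails the assertion.
import pandas as pd

left = pd.DataFrame({"band": [1, 2], "map_id": ["a", "b"]})
right = left.iloc[::-1][["map_id", "band"]]  # same data, shuffled rows and columns

# Without check_like=True this raises an AssertionError on ordering alone;
# with it, the frames compare equal.
pd.testing.assert_frame_equal(left, right, check_dtype=False, check_like=True)
```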
