Merge pull request #5799 from trexfeathers/v3.8.x.updates
v3.8.1 updates
bjlittle authored Mar 4, 2024
2 parents b5a754e + a0adb70 commit 3943951
Showing 5 changed files with 62 additions and 29 deletions.
16 changes: 16 additions & 0 deletions docs/src/whatsnew/3.8.rst
@@ -42,6 +42,22 @@ This document explains the changes made to Iris for this release
 any issues or feature requests for improving Iris. Enjoy!
 
 
+v3.8.1 (04 Mar 2024)
+====================
+
+.. dropdown:: v3.8.1 Patches
+   :color: primary
+   :icon: alert
+   :animate: fade-in
+   :open:
+
+   The patches in this release of Iris include:
+
+   #. `@stephenworsley`_ fixed a potential memory leak for Iris uses of
+      :func:`dask.array.map_blocks`; known specifically to be a problem in the
+      :class:`iris.analysis.AreaWeighted` regridder. (:pull:`5767`)
+
+
 📢 Announcements
 ================
 
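For context on the patch note above: the code changes in this commit replace functools.partial closures with arguments forwarded through dask.array.map_blocks. A minimal sketch of the two calling conventions; the scale helper and array sizes are invented for illustration and are not part of this commit:

import functools

import dask.array as da
import numpy as np


def scale(block, weights):
    # Multiply each block by a broadcastable weights array.
    return block * weights


data = da.ones((4, 1000), chunks=(1, 1000))
weights = np.linspace(0.0, 1.0, 1000)

# Convention being removed: bind the extra argument into the function object
# before handing it to map_blocks.
bound = functools.partial(scale, weights=weights)
old_style = da.map_blocks(bound, data, dtype=data.dtype)

# Convention adopted by this commit: pass the extra argument to map_blocks
# itself, which forwards it to ``scale`` for every block.
new_style = da.map_blocks(scale, data, weights=weights, dtype=data.dtype)

assert np.allclose(old_style.compute(), new_style.compute())

Both produce the same values; only the way the extra argument reaches each block differs.
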
47 changes: 34 additions & 13 deletions lib/iris/_lazy_data.py
@@ -450,10 +450,11 @@ def lazy_elementwise(lazy_array, elementwise_op):
     return da.map_blocks(elementwise_op, lazy_array, dtype=dtype)
 
 
-def map_complete_blocks(src, func, dims, out_sizes):
+def map_complete_blocks(src, func, dims, out_sizes, *args, **kwargs):
     """Apply a function to complete blocks.
 
     Complete means that the data is not chunked along the chosen dimensions.
+    Uses :func:`dask.array.map_blocks` to implement the mapping.
 
     Parameters
     ----------
@@ -465,27 +466,47 @@ def map_complete_blocks(src, func, dims, out_sizes):
         Dimensions that cannot be chunked.
     out_sizes : tuple of int
         Output size of dimensions that cannot be chunked.
+    *args : tuple
+        Additional arguments to pass to `func`.
+    **kwargs : dict
+        Additional keyword arguments to pass to `func`.
+
+    Returns
+    -------
+    Array-like
+
+    See Also
+    --------
+    :func:`dask.array.map_blocks` : The function used for the mapping.
 
     """
+    data = None
+    result = None
+
     if is_lazy_data(src):
         data = src
     elif not hasattr(src, "has_lazy_data"):
         # Not a lazy array and not a cube. So treat as ordinary numpy array.
-        return func(src)
+        result = func(src, *args, **kwargs)
     elif not src.has_lazy_data():
-        return func(src.data)
+        result = func(src.data, *args, **kwargs)
     else:
         data = src.lazy_data()
 
-    # Ensure dims are not chunked
-    in_chunks = list(data.chunks)
-    for dim in dims:
-        in_chunks[dim] = src.shape[dim]
-    data = data.rechunk(in_chunks)
+    if result is None and data is not None:
+        # Ensure dims are not chunked
+        in_chunks = list(data.chunks)
+        for dim in dims:
+            in_chunks[dim] = src.shape[dim]
+        data = data.rechunk(in_chunks)
 
-    # Determine output chunks
-    out_chunks = list(data.chunks)
-    for dim, size in zip(dims, out_sizes):
-        out_chunks[dim] = size
+        # Determine output chunks
+        out_chunks = list(data.chunks)
+        for dim, size in zip(dims, out_sizes):
+            out_chunks[dim] = size
 
-    return data.map_blocks(func, chunks=out_chunks, dtype=src.dtype)
+        result = data.map_blocks(
+            func, *args, chunks=out_chunks, dtype=src.dtype, **kwargs
+        )
+
+    return result
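
A hedged usage sketch of the updated map_complete_blocks signature; the take_percentile helper, sizes, and chunking are invented, and it assumes an Iris version containing this change (v3.8.1 or later):

import dask.array as da
import numpy as np

from iris._lazy_data import map_complete_blocks


def take_percentile(data, percent):
    # Reduce the trailing axis to the requested percentiles, then move the
    # percentile axis to the end so the block keeps its leading shape.
    result = np.percentile(data, percent, axis=-1)
    return np.moveaxis(result, 0, -1)


lazy = da.random.random((6, 100), chunks=(2, 50))

# ``percent`` is forwarded by map_complete_blocks itself rather than being
# baked into the function with functools.partial.
result = map_complete_blocks(
    lazy, take_percentile, dims=(-1,), out_sizes=(2,), percent=[25, 75]
)
print(result.compute().shape)  # (6, 2)

Called with a plain NumPy array instead of lazy, the same invocation simply runs take_percentile eagerly and returns its result.
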
10 changes: 4 additions & 6 deletions lib/iris/analysis/__init__.py
@@ -1378,18 +1378,16 @@ def _percentile(data, percent, fast_percentile_method=False, **kwargs):
         percent = [percent]
     percent = np.array(percent)
 
-    # Perform the percentile calculation.
-    _partial_percentile = functools.partial(
+    result = iris._lazy_data.map_complete_blocks(
+        data,
         _calc_percentile,
+        (-1,),
+        percent.shape,
         percent=percent,
         fast_percentile_method=fast_percentile_method,
         **kwargs,
     )
 
-    result = iris._lazy_data.map_complete_blocks(
-        data, _partial_percentile, (-1,), percent.shape
-    )
-
     # Check whether to reduce to a scalar result, as per the behaviour
     # of other aggregators.
     if result.shape == (1,):
10 changes: 4 additions & 6 deletions lib/iris/analysis/_area_weighted.py
@@ -392,20 +392,18 @@ def _regrid_area_weighted_rectilinear_src_and_grid__perform(

     tgt_shape = (len(grid_y.points), len(grid_x.points))
 
-    # Calculate new data array for regridded cube.
-    regrid = functools.partial(
+    new_data = map_complete_blocks(
+        src_cube,
         _regrid_along_dims,
+        (src_y_dim, src_x_dim),
+        meshgrid_x.shape,
         x_dim=src_x_dim,
         y_dim=src_y_dim,
         weights=weights,
         tgt_shape=tgt_shape,
         mdtol=mdtol,
     )
 
-    new_data = map_complete_blocks(
-        src_cube, regrid, (src_y_dim, src_x_dim), meshgrid_x.shape
-    )
-
     # Wrap up the data as a Cube.
 
     _regrid_callback = functools.partial(
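For reference, a self-contained sketch of the regridding path this file patches; the grid-building helper and sizes are invented, and it assumes Iris v3.8.1 or later so that lazy source data flows through the updated map_complete_blocks:

import dask.array as da
import numpy as np

from iris.analysis import AreaWeighted
from iris.coord_systems import GeogCS
from iris.coords import DimCoord
from iris.cube import Cube


def grid_cube(n_lat, n_lon, data):
    # Build a simple cube with bounded lat/lon coordinates, as required by
    # the area-weighted scheme.
    cs = GeogCS(6371229.0)
    lat = DimCoord(
        np.linspace(-80.0, 80.0, n_lat),
        standard_name="latitude",
        units="degrees",
        coord_system=cs,
    )
    lon = DimCoord(
        np.linspace(0.0, 350.0, n_lon),
        standard_name="longitude",
        units="degrees",
        coord_system=cs,
    )
    lat.guess_bounds()
    lon.guess_bounds()
    return Cube(data, dim_coords_and_dims=[(lat, 0), (lon, 1)])


# Lazy source data keeps the regridding deferred, which routes it through
# map_complete_blocks (the code path patched by this commit).
src = grid_cube(40, 60, da.random.random((40, 60), chunks=(10, 60)))
tgt = grid_cube(20, 30, np.zeros((20, 30)))

result = src.regrid(tgt, AreaWeighted(mdtol=0.5))
print(result.shape)  # (20, 30)
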
8 changes: 4 additions & 4 deletions lib/iris/analysis/_regrid.py
@@ -932,9 +932,11 @@ def __call__(self, src):
         x_dim = src.coord_dims(src_x_coord)[0]
         y_dim = src.coord_dims(src_y_coord)[0]
 
-        # Define regrid function
-        regrid = functools.partial(
+        data = map_complete_blocks(
+            src,
             self._regrid,
+            (y_dim, x_dim),
+            sample_grid_x.shape,
             x_dim=x_dim,
             y_dim=y_dim,
             src_x_coord=src_x_coord,
@@ -945,8 +947,6 @@ def __call__(self, src):
             extrapolation_mode=self._extrapolation_mode,
         )
 
-        data = map_complete_blocks(src, regrid, (y_dim, x_dim), sample_grid_x.shape)
-
         # Wrap up the data as a Cube.
         _regrid_callback = functools.partial(
             self._regrid,
