Skip to content

Commit

Permalink
update(Mf6Splitter): change how node mapping is stored and loaded
Browse files Browse the repository at this point in the history
* `save_node_mapping` now writes a compressed `hdf5` file and stores additional modelgrid reconstruction information for original and split model representations
* `load_node_mapping` changed to a static method that returns a Mf6Splitter object that can be used for array reconstruction
* updated autotests
* added example usage to `mf6_parallel_model_splitting_example.py`
* JSON node mapping files are no longer supported; users must rewrite existing node mapping files as HDF5 files.
  • Loading branch information
jlarsen-usgs committed Mar 5, 2025
1 parent c61643f commit 737d4ca
Show file tree
Hide file tree
Showing 3 changed files with 486 additions and 106 deletions.
66 changes: 66 additions & 0 deletions .docs/Notebooks/mf6_parallel_model_splitting_example.py
Original file line number Diff line number Diff line change
Expand Up @@ -768,3 +768,69 @@ def string2geom(geostring, conversion=None):
sa = np.array(seg)
ax.plot(sa[:, 0], sa[:, 1], "b-")
# -

# ### Save node mapping data
#
# The `save_node_mapping()` method allows users to save an HDF5 representation of model splitter information that can be reloaded and used to reconstruct arrays at a later time.

# +
# Write the splitter's node mapping (plus modelgrid reconstruction info,
# per this commit) to a compressed HDF5 file for later reuse.
filename = workspace / "node_mapping.hdf5"
mfsplit.save_node_mapping(filename)
# -

# ### Reloading node mapping data
#
# The `load_node_mapping()` method allows the user to instantiate an Mf6Splitter object from an HDF5 node mapping file for reconstructing output arrays.

# +
# Rebuild a splitter object purely from the HDF5 file written above;
# no simulation object is needed (load_node_mapping is a static method).
mfs = Mf6Splitter.load_node_mapping(filename)
# -

# Reconstruct heads using the `Mf6Splitter` object we just created

# +
model_names = list(new_sim.model_names)
head_dict = {}
for modelname in model_names:
mnum = int(modelname.split("_")[-1])
head = new_sim.get_model(modelname).output.head().get_alldata()[-1]
head_dict[mnum] = head

ra_heads = mfs.reconstruct_array(head_dict)
ra_watertable = flopy.utils.postprocessing.get_water_table(ra_heads)
# -

# +
# Three-panel comparison: reconstructed (split) heads, original single-model
# heads, and their difference.
with styles.USGSMap():
    fig, axs = plt.subplots(nrows=3, figsize=(8, 12))
    diff = ra_heads - heads
    hv = [ra_heads, heads, diff]
    titles = ["Multiple models", "Single model", "Multiple - single"]
    for idx, ax in enumerate(axs):
        ax.set_aspect("equal")
        ax.set_title(titles[idx])

        # The two head panels share contour levels and a common color scale;
        # the difference panel autoscales and gets no contours.
        if idx < 2:
            levels = contours
            vmin = hmin
            vmax = hmax
        else:
            levels = None
            vmin = None
            vmax = None

        pmv = flopy.plot.PlotMapView(modelgrid=gwf.modelgrid, ax=ax, layer=0)
        h = pmv.plot_array(hv[idx], vmin=vmin, vmax=vmax)
        if levels is not None:
            c = pmv.contour_array(
                hv[idx], levels=levels, colors="white", linewidths=0.75, linestyles=":"
            )
            plt.clabel(c, fontsize=8)
        pmv.plot_inactive()
        plt.colorbar(h, ax=ax, shrink=0.5)

        # Overlay the basin boundary (red) and stream segments (blue).
        ax.plot(bp[:, 0], bp[:, 1], "r-")
        for seg in segs:
            sa = np.array(seg)
            ax.plot(sa[:, 0], sa[:, 1], "b-")
# -
90 changes: 84 additions & 6 deletions autotest/test_model_splitter.py
Original file line number Diff line number Diff line change
Expand Up @@ -204,10 +204,10 @@ def test_metis_splitting_with_lak_sfr(function_tmpdir):

@requires_exe("mf6")
@requires_pkg("pymetis")
def test_save_load_node_mapping(function_tmpdir):
def test_save_load_node_mapping_structured(function_tmpdir):
sim_path = get_example_data_path() / "mf6-freyberg"
new_sim_path = function_tmpdir / "mf6-freyberg/split_model"
json_file = new_sim_path / "node_map.json"
hdf_file = new_sim_path / "node_map.hdf5"
nparts = 5

sim = MFSimulation.load(sim_ws=sim_path)
Expand All @@ -225,12 +225,11 @@ def test_save_load_node_mapping(function_tmpdir):
new_sim.run_simulation()
original_node_map = mfsplit._node_map

mfsplit.save_node_mapping(json_file)
mfsplit.save_node_mapping(hdf_file)

new_sim2 = MFSimulation.load(sim_ws=new_sim_path)

mfsplit2 = Mf6Splitter(new_sim2)
mfsplit2.load_node_mapping(new_sim2, json_file)
mfsplit2 = Mf6Splitter.load_node_mapping(hdf_file)
saved_node_map = mfsplit2._node_map

for k, v1 in original_node_map.items():
Expand All @@ -249,6 +248,86 @@ def test_save_load_node_mapping(function_tmpdir):
np.testing.assert_allclose(new_heads, original_heads, err_msg=err_msg)


@requires_exe("mf6")
def test_save_load_node_mapping_vertex(function_tmpdir):
    """Round-trip the node mapping through HDF5 for a vertex (DISV) grid.

    Splits a DISV model into two submodels, saves the node mapping,
    reloads it into a fresh ``Mf6Splitter`` via the static
    ``load_node_mapping`` method, and checks that heads reconstructed
    from the split models match the original model's heads.
    """
    sim_path = get_example_data_path() / "mf6" / "test003_gwftri_disv"
    new_sim_path = function_tmpdir / "split_model"
    hdf_file = new_sim_path / "vertex_node_map.hdf5"

    # Run the unsplit simulation to produce the reference heads.
    sim = MFSimulation.load(sim_ws=sim_path)
    sim.set_sim_path(function_tmpdir)
    sim.write_simulation()
    sim.run_simulation()

    gwf = sim.get_model()

    # Partition plan: cells 0-84 go to submodel 1, the rest to submodel 0.
    partition = np.zeros((gwf.modelgrid.ncpl,), dtype=int)
    partition[0:85] = 1

    splitter = Mf6Splitter(sim)
    split_sim = splitter.split_model(partition)

    split_sim.set_sim_path(new_sim_path)
    split_sim.write_simulation()
    split_sim.run_simulation()
    splitter.save_node_mapping(hdf_file)

    original_heads = np.squeeze(gwf.output.head().get_alldata()[-1])

    ml0 = split_sim.get_model("gwf_1_0")
    ml1 = split_sim.get_model("gwf_1_1")
    heads0 = ml0.output.head().get_alldata()[-1]
    heads1 = ml1.output.head().get_alldata()[-1]

    # Reconstruct through a splitter rebuilt purely from the HDF5 file.
    reloaded = Mf6Splitter.load_node_mapping(hdf_file)
    new_heads = reloaded.reconstruct_array({0: heads0, 1: heads1})

    err_msg = "Heads from original and split models do not match"
    np.testing.assert_allclose(
        new_heads, original_heads, rtol=0.002, atol=0.01, err_msg=err_msg
    )


@requires_exe("mf6")
def test_save_load_node_mapping_unstructured(function_tmpdir):
    """Round-trip the node mapping through HDF5 for an unstructured (DISU) grid.

    Mirrors the structured/vertex round-trip tests: split a DISU model in
    two, persist the node mapping to HDF5, reload it with the static
    ``load_node_mapping``, and verify the reconstructed heads match the
    unsplit model's heads.
    """
    sim_path = get_example_data_path() / "mf6" / "test006_gwf3"
    new_sim_path = function_tmpdir / "split_model"
    hdf_file = new_sim_path / "unstruct_node_mapping.hdf5"

    # Run the unsplit simulation to produce the reference heads.
    sim = MFSimulation.load(sim_ws=sim_path)
    sim.set_sim_path(function_tmpdir)
    sim.write_simulation()
    sim.run_simulation()

    gwf = sim.get_model()

    # Partition plan over all nodes: node 65 onward goes to submodel 1.
    partition = np.zeros((gwf.modelgrid.nnodes,), dtype=int)
    partition[65:] = 1

    splitter = Mf6Splitter(sim)
    split_sim = splitter.split_model(partition)

    split_sim.set_sim_path(new_sim_path)
    split_sim.write_simulation()
    split_sim.run_simulation()
    splitter.save_node_mapping(hdf_file)

    original_heads = np.squeeze(gwf.output.head().get_alldata()[-1])

    ml0 = split_sim.get_model("gwf_1_0")
    ml1 = split_sim.get_model("gwf_1_1")
    heads0 = ml0.output.head().get_alldata()[-1]
    heads1 = ml1.output.head().get_alldata()[-1]

    # Reconstruct through a splitter rebuilt purely from the HDF5 file.
    reloaded = Mf6Splitter.load_node_mapping(hdf_file)
    new_heads = reloaded.reconstruct_array({0: heads0, 1: heads1})

    err_msg = "Heads from original and split models do not match"
    np.testing.assert_allclose(new_heads, original_heads, err_msg=err_msg)


def test_control_records(function_tmpdir):
nrow = 10
ncol = 10
Expand Down Expand Up @@ -847,7 +926,6 @@ def test_unstructured_complex_disu(function_tmpdir):
raise AssertionError("Reconstructed head results outside of tolerance")


@pytest.mark.slow
@requires_exe("mf6")
@requires_pkg("pymetis", "scipy")
def test_multi_model(function_tmpdir):
Expand Down
Loading

0 comments on commit 737d4ca

Please sign in to comment.