Skip to content

Commit

Permalink
fix(masked_4D_arrays): allow re-use of preceding spd data if empty (#2314)
Browse files Browse the repository at this point in the history

Addresses the issue raised in discussion #2309.

fix(MFPandasTransientList.masked_4D_arrays_itr): allow re-use of preceding spd data if empty (mf6)

    * generalize setting of array shape for different grid types
    * re-use preceding spd array data
    * update relevant test

fix(MfList.masked_4d_arrays): allow re-use of preceding spd data if empty (non-mf6)

    * generalize setting of array shape for different grid types
    * re-use preceding spd array data

fix(model_attributes_to_shapefile): fix exporting of transientlist if disu

    * create separate case for disu
    * update relevant test

test(masked_4D_arrays): add tests

    * compare mf6 and non-mf6 shp export (riv data)

refactor(MFTransientList): re-use masked_4D_arrays_itr implementation
refactor(model_attributes_to_shapefile): remove bare except
refactor(model_attributes_to_shapefile): skip transientlist if empty
refactor(model_attributes_to_shapefile): skip transientlist if all elements are of object type

    * if package has timeseries data, transientlist elements are all of object type

refactor(mflist_export): skip mflist if all elements are of object type

    * if package has timeseries data, mflist elements are all of object type

fix(mfUsgDisU): return ncpl as integer
refactor(model_attributes_to_shapefile): use first recarray kper to check transientlist
refactor(mflist_export): use first recarray kper to check mflist
  • Loading branch information
martclanor authored Oct 20, 2024
1 parent 3a2c494 commit 332a310
Show file tree
Hide file tree
Showing 8 changed files with 95 additions and 141 deletions.
39 changes: 27 additions & 12 deletions autotest/test_export.py
Original file line number Diff line number Diff line change
Expand Up @@ -874,8 +874,10 @@ def test_export_contours(function_tmpdir, example_data_path):


@pytest.mark.mf6
@requires_pkg("shapely")
def test_mf6_grid_shp_export(function_tmpdir):
@requires_pkg("pyshp", "shapely", name_map={"pyshp": "shapefile"})
def test_export_mf6_shp(function_tmpdir):
from shapefile import Reader

nlay = 2
nrow = 10
ncol = 10
Expand Down Expand Up @@ -946,9 +948,6 @@ def test_mf6_grid_shp_export(function_tmpdir):
gwf, pname="dis", nlay=nlay, nrow=nrow, ncol=ncol, top=top, botm=botm
)

def cellid(k, i, j, nrow, ncol):
return k * nrow * ncol + i * ncol + j

# Riv6
spd6 = flopy.mf6.ModflowGwfriv.stress_period_data.empty(
gwf, maxbound=len(spd)
Expand All @@ -957,9 +956,6 @@ def cellid(k, i, j, nrow, ncol):
for c in spd.dtype.names:
if c in spd6[0].dtype.names:
spd6[0][c] = spd[c]
# MFTransient list apparently requires entries for additional stress periods,
# even if they are the same
spd6[1] = spd6[0]
riv6 = flopy.mf6.ModflowGwfriv(gwf, stress_period_data=spd6)
rch6 = flopy.mf6.ModflowGwfrcha(gwf, recharge=rech)

Expand All @@ -971,13 +967,10 @@ def cellid(k, i, j, nrow, ncol):
), f"variable {k} is not equal"
pass

if not has_pkg("shapefile"):
return

m.export(function_tmpdir / "mfnwt.shp")
gwf.export(function_tmpdir / "mf6.shp")

# check that the two shapefiles are the same
# check that the shapefiles are the same
ra = shp2recarray(function_tmpdir / "mfnwt.shp")
ra6 = shp2recarray(function_tmpdir / "mf6.shp")

Expand All @@ -1002,6 +995,28 @@ def cellid(k, i, j, nrow, ncol):
else:
assert np.abs(it - it6) < 1e-6

# Compare exported riv shapefiles
riv.export(function_tmpdir / "riv.shp")
riv6.export(function_tmpdir / "riv6.shp")
with Reader(function_tmpdir / "riv.shp") as riv_shp, Reader(
function_tmpdir / "riv6.shp"
) as riv6_shp:
assert list(riv_shp.shapeRecord(-1).record) == list(
riv6_shp.shapeRecord(-1).record
)

# Check wel export with timeseries
wel_spd_0 = flopy.mf6.ModflowGwfwel.stress_period_data.empty(
gwf, maxbound=1, timeseries=True
)
wel_spd_0[0][0] = ((0, 0, 0), -99.0)
wel = flopy.mf6.ModflowGwfwel(
gwf,
maxbound=1,
stress_period_data={0: wel_spd_0[0]},
)
wel.export(function_tmpdir / "wel_test.shp")


@requires_pkg("pyshp", name_map={"pyshp": "shapefile"})
@pytest.mark.slow
Expand Down
2 changes: 2 additions & 0 deletions autotest/test_gridgen.py
Original file line number Diff line number Diff line change
Expand Up @@ -374,6 +374,8 @@ def test_mf6disu(sim_disu_diff_layers):
gwf.modelgrid.write_shapefile(fname)
fname = ws / "model.shp"
gwf.export(fname)
fname = ws / "chd.shp"
gwf.chd.export(fname)

sim.run_simulation(silent=True)
head = gwf.output.head().get_data()
Expand Down
30 changes: 26 additions & 4 deletions flopy/export/shapefile_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -377,14 +377,36 @@ def model_attributes_to_shapefile(
assert arr.shape == horz_shape
array_dict[name] = arr
elif a.data_type == DataType.transientlist:
try:
list(a.masked_4D_arrays_itr())
except:
# Skip empty transientlist
if not a.data:
continue

# Use first recarray kper to check transientlist
for kper in a.data.keys():
if isinstance(a.data[kper], np.recarray):
break
# Skip transientlist if all elements are of object type
if all(
dtype == np.object_
for dtype, _ in a.data[kper].dtype.fields.values()
):
continue

for name, array in a.masked_4D_arrays_itr():
n = shape_attr_name(name, length=4)
for kper in range(array.shape[0]):
# guard clause for disu case
# array is (kper, node) only
if len(array.shape) == 2:
aname = f"{n}{kper + 1}"
arr = array[kper]
assert arr.shape == horz_shape
if np.all(np.isnan(arr)):
continue
array_dict[aname] = arr
continue
# non-disu case
for k in range(array.shape[1]):
n = shape_attr_name(name, length=4)
aname = f"{n}{k + 1}{kper + 1}"
arr = array[kper][k]
assert arr.shape == horz_shape
Expand Down
10 changes: 10 additions & 0 deletions flopy/export/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -962,6 +962,16 @@ def mflist_export(f: Union[str, os.PathLike, NetCdf], mfl, **kwargs):

elif isinstance(f, NetCdf) or isinstance(f, dict):
base_name = mfl.package.name[0].lower()
# Use first recarray kper to check mflist
for kper in mfl.data.keys():
if isinstance(mfl.data[kper], np.recarray):
break
# Skip mflist if all elements are of object type
if all(
dtype == np.object_
for dtype, _ in mfl.data[kper].dtype.fields.values()
):
return f

for name, array in mfl.masked_4D_arrays_itr():
var_name = f"{base_name}_{name}"
Expand Down
46 changes: 1 addition & 45 deletions flopy/mf6/data/mfdatalist.py
Original file line number Diff line number Diff line change
Expand Up @@ -1546,51 +1546,7 @@ def data(self):

@property
def masked_4D_arrays(self):
"""Returns list data as a masked 4D array."""
model_grid = self.data_dimensions.get_model_grid()
nper = self.data_dimensions.package_dim.model_dim[
0
].simulation_time.get_num_stress_periods()
# get the first kper
arrays = self.to_array(kper=0, mask=True)

if arrays is not None:
# initialize these big arrays
if model_grid.grid_type() == DiscretizationType.DIS:
m4ds = {}
for name, array in arrays.items():
m4d = np.zeros(
(
nper,
model_grid.num_layers,
model_grid.num_rows,
model_grid.num_columns,
)
)
m4d[0, :, :, :] = array
m4ds[name] = m4d
for kper in range(1, nper):
arrays = self.to_array(kper=kper, mask=True)
for name, array in arrays.items():
m4ds[name][kper, :, :, :] = array
return m4ds
else:
m3ds = {}
for name, array in arrays.items():
m3d = np.zeros(
(
nper,
model_grid.num_layers,
model_grid.num_cells_per_layer(),
)
)
m3d[0, :, :] = array
m3ds[name] = m3d
for kper in range(1, nper):
arrays = self.to_array(kper=kper, mask=True)
for name, array in arrays.items():
m3ds[name][kper, :, :] = array
return m3ds
return dict(self.masked_4D_arrays_itr())

def masked_4D_arrays_itr(self):
"""Returns list data as an iterator of a masked 4D array."""
Expand Down
51 changes: 13 additions & 38 deletions flopy/mf6/data/mfdataplist.py
Original file line number Diff line number Diff line change
Expand Up @@ -2278,47 +2278,22 @@ def set_data(self, data, key=None, autofill=False):

def masked_4D_arrays_itr(self):
"""Returns list data as an iterator of a masked 4D array."""
model_grid = self.data_dimensions.get_model_grid()
nper = self.data_dimensions.package_dim.model_dim[
0
].simulation_time.get_num_stress_periods()
# get the first kper
arrays = self.to_array(kper=0, mask=True)

if arrays is not None:
# initialize these big arrays
for name, array in arrays.items():
if model_grid.grid_type() == DiscretizationType.DIS:
m4d = np.zeros(
(
nper,
model_grid.num_layers(),
model_grid.num_rows(),
model_grid.num_columns(),
)
)
m4d[0, :, :, :] = array
for kper in range(1, nper):
arrays = self.to_array(kper=kper, mask=True)
for tname, array in arrays.items():
if tname == name:
m4d[kper, :, :, :] = array
yield name, m4d
else:
m3d = np.zeros(
(
nper,
model_grid.num_layers(),
model_grid.num_cells_per_layer(),
)
)
m3d[0, :, :] = array
for kper in range(1, nper):
arrays = self.to_array(kper=kper, mask=True)
for tname, array in arrays.items():
if tname == name:
m3d[kper, :, :] = array
yield name, m3d

# get the first kper array to extract array shape and names
arrays_kper_0 = self.to_array(kper=0, mask=True)
shape_per_spd = next(iter(arrays_kper_0.values())).shape

for name in arrays_kper_0.keys():
ma = np.zeros((nper, *shape_per_spd))
for kper in range(nper):
# If new_arrays is not None, overwrite arrays
if new_arrays := self.to_array(kper=kper, mask=True):
arrays = new_arrays
ma[kper] = arrays[name]
yield name, ma

def _set_data_record(
self,
Expand Down
2 changes: 1 addition & 1 deletion flopy/mfusg/mfusgdisu.py
Original file line number Diff line number Diff line change
Expand Up @@ -500,7 +500,7 @@ def zcentroids(self):

@property
def ncpl(self):
return self.nodes / self.nlay
return self.nodes // self.nlay

@classmethod
def load(cls, f, model, ext_unit_dict=None, check=True):
Expand Down
56 changes: 15 additions & 41 deletions flopy/utils/util_list.py
Original file line number Diff line number Diff line change
Expand Up @@ -1106,49 +1106,23 @@ def to_array(self, kper=0, mask=False):

@property
def masked_4D_arrays(self):
# get the first kper
arrays = self.to_array(kper=0, mask=True)

# initialize these big arrays
m4ds = {}
for name, array in arrays.items():
m4d = np.zeros(
(
self._model.nper,
self._model.nlay,
self._model.nrow,
self._model.ncol,
)
)
m4d[0, :, :, :] = array
m4ds[name] = m4d
for kper in range(1, self._model.nper):
arrays = self.to_array(kper=kper, mask=True)
for name, array in arrays.items():
m4ds[name][kper, :, :, :] = array
return m4ds
return dict(self.masked_4D_arrays_itr())

def masked_4D_arrays_itr(self):
# get the first kper
arrays = self.to_array(kper=0, mask=True)

# initialize these big arrays
for name, array in arrays.items():
m4d = np.zeros(
(
self._model.nper,
self._model.nlay,
self._model.nrow,
self._model.ncol,
)
)
m4d[0, :, :, :] = array
for kper in range(1, self._model.nper):
arrays = self.to_array(kper=kper, mask=True)
for tname, array in arrays.items():
if tname == name:
m4d[kper, :, :, :] = array
yield name, m4d
nper = self._model.nper

# get the first kper array to extract array shape and names
arrays_kper_0 = self.to_array(kper=0, mask=True)
shape_per_spd = next(iter(arrays_kper_0.values())).shape

for name in arrays_kper_0.keys():
ma = np.zeros((nper, *shape_per_spd))
for kper in range(nper):
# If new_arrays is not None, overwrite arrays
if new_arrays := self.to_array(kper=kper, mask=True):
arrays = new_arrays
ma[kper] = arrays[name]
yield name, ma

@property
def array(self):
Expand Down

0 comments on commit 332a310

Please sign in to comment.