diff --git a/autotest/regression/test_model_netcdf.py b/autotest/regression/test_model_netcdf.py new file mode 100644 index 0000000000..8cbdbe334f --- /dev/null +++ b/autotest/regression/test_model_netcdf.py @@ -0,0 +1,1302 @@ +import os +import shutil + +import numpy as np +import pytest +import xarray as xr +from modflow_devtools.markers import requires_exe, requires_pkg +from pyproj import CRS + +import flopy +from flopy.discretization.structuredgrid import StructuredGrid +from flopy.discretization.vertexgrid import VertexGrid +from flopy.utils.gridutil import get_disv_kwargs +from flopy.utils.model_netcdf import create_dataset + + +def compare_netcdf(base, gen, projection=False, update=None): + """Check for functional equivalence""" + xrb = xr.open_dataset(base, engine="netcdf4") + xrg = xr.open_dataset(gen, engine="netcdf4") + + # global attributes + for a in xrb.attrs: + assert a in xrg.attrs + if a == "title" or a == "history" or a == "source": + continue + assert xrb.attrs[a] == xrg.attrs[a] + + # coordinates + for coordname, da in xrb.coords.items(): + compare_netcdf_var( + coordname, xrb.coords, xrg.data_vars, xrg.coords, projection, update + ) + + # variables + for varname, da in xrb.data_vars.items(): + if varname == "projection": + if projection: + assert varname in xrg.data_vars + assert "wkt" in da.attrs or "crs_wkt" in da.attrs + if "wkt" in da.attrs: + attr = "wkt" + else: + attr = "crs_wkt" + assert attr in xrg.data_vars[varname].attrs + continue + + compare_netcdf_var( + varname, xrb.data_vars, xrg.data_vars, xrg.coords, projection, update + ) + + +def compare_netcdf_data(base, gen): + """Data comparison check""" + xrb = xr.open_dataset(base, engine="netcdf4") + xrg = xr.open_dataset(gen, engine="netcdf4") + + # coordinates + for coordname, da in xrb.coords.items(): + compare_netcdf_var(coordname, xrb.coords, xrg.data_vars, xrg.coords) + + # variables + for varname, da in xrb.data_vars.items(): + if varname == "projection": + continue + + compare_netcdf_var(varname, xrb.data_vars, xrg.data_vars, xrg.coords) + + +def compare_netcdf_var(varname, base_d, gen_d, coord_d, projection=False, update=None): + # check variable name + assert varname in gen_d or varname in coord_d + + if varname in gen_d: + var_d = gen_d + else: + var_d = coord_d + + # encodings + for e in base_d[varname].encoding: + assert e in var_d[varname].encoding + if e.lower() == "source": + continue + if e == "_FillValue": + if np.isnan(base_d[varname].encoding[e]): + assert np.isnan(var_d[varname].encoding[e]) + else: + assert np.allclose( + base_d[varname].encoding[e], var_d[varname].encoding[e] + ) + else: + assert base_d[varname].encoding[e] == var_d[varname].encoding[e] + + # check variable attributes + for a in base_d[varname].attrs: + if a == "grid_mapping" and not projection: + continue + assert a in var_d[varname].attrs + assert base_d[varname].attrs[a] == var_d[varname].attrs[a] + + # check variable data + print(f"NetCDF file check data equivalence for variable: {varname}") + if update and varname in update: + assert np.allclose(update[varname], var_d[varname].data) + else: + assert np.allclose(base_d[varname].data, var_d[varname].data) + + +@pytest.mark.regression +def test_load_gwfsto01(function_tmpdir, example_data_path): + data_path_base = example_data_path / "mf6" / "netcdf" + tests = { + "test_gwf_sto01_mesh": { + "base_sim_dir": "gwf_sto01", + "netcdf_output_file": "gwf_sto01.in.nc", + "netcdf_type": "mesh2d", + }, + "test_gwf_sto01_structured": { + "base_sim_dir": "gwf_sto01", + 
"netcdf_output_file": "gwf_sto01.in.nc", + "netcdf_type": "structured", + }, + } + ws = function_tmpdir / "ws" + for dirname, test in tests.items(): + data_path = os.path.join(data_path_base, dirname, test["base_sim_dir"]) + + # copy example data into working directory + base_path = os.path.join(ws, f"{dirname}_base") + test_path = os.path.join(ws, f"{dirname}_test") + shutil.copytree(data_path, base_path) + + # load example + sim = flopy.mf6.MFSimulation.load(sim_ws=base_path) + + # set simulation path and write simulation + sim.set_sim_path(test_path) + sim.write_simulation(netcdf=test["netcdf_type"]) + + # compare generated files + gen_files = [ + f + for f in os.listdir(test_path) + if os.path.isfile(os.path.join(test_path, f)) + ] + base_files = [ + f + for f in os.listdir(base_path) + if os.path.isfile(os.path.join(base_path, f)) + ] + + assert len(gen_files) == len(base_files) + for f in base_files: + base = os.path.join(base_path, f) + gen = os.path.join(test_path, f) + if f != test["netcdf_output_file"]: + # "gwf_sto01.dis.ncf": # TODO wkt string missing on write? + with open(base, "r") as file1, open(gen, "r") as file2: + # Skip first line + next(file1) + next(file2) + + for line1, line2 in zip(file1, file2): + assert line1.lower() == line2.lower() + else: + compare_netcdf(base, gen) + + +@pytest.mark.regression +def test_update_gwfsto01(function_tmpdir, example_data_path): + data_path_base = example_data_path / "mf6" / "netcdf" + tests = { + "test_gwf_sto01_mesh": { + "base_sim_dir": "gwf_sto01", + "netcdf_output_file": "gwf_sto01.in.nc", + "netcdf_type": "mesh2d", + }, + "test_gwf_sto01_structured": { + "base_sim_dir": "gwf_sto01", + "netcdf_output_file": "gwf_sto01.in.nc", + "netcdf_type": "structured", + }, + } + + nlay, nrow, ncol = 3, 10, 10 + + dis_top = np.full((nrow, ncol), 50) + + # ic + strt1 = np.full((nrow, ncol), 0.15) + strt2 = np.full((nrow, ncol), 0.21) + strt3 = np.full((nrow, ncol), 1.21) + ic_strt = np.array([strt1, strt2, strt3]) + + update = { + "dis_top": dis_top.flatten()[0], + "ic_strt": ic_strt, + "ic_strt_l1": ic_strt[0].flatten(), + "ic_strt_l2": ic_strt[1].flatten(), + "ic_strt_l3": ic_strt[2].flatten(), + } + + ws = function_tmpdir / "ws" + for dirname, test in tests.items(): + data_path = os.path.join(data_path_base, dirname, test["base_sim_dir"]) + + # copy example data into working directory + base_path = os.path.join(ws, f"{dirname}_base") + test_path = os.path.join(ws, f"{dirname}_test") + shutil.copytree(data_path, base_path) + + # load example + sim = flopy.mf6.MFSimulation.load(sim_ws=base_path) + + # get model instance + gwf = sim.get_model("gwf_sto01") + + # update dis top + gwf.dis.top = dis_top + + # update ic strt + gwf.ic.strt.set_data(ic_strt) + + # set simulation path and write simulation + sim.set_sim_path(test_path) + sim.write_simulation(netcdf=test["netcdf_type"]) + + # compare generated files + gen_files = [ + f + for f in os.listdir(test_path) + if os.path.isfile(os.path.join(test_path, f)) + ] + base_files = [ + f + for f in os.listdir(base_path) + if os.path.isfile(os.path.join(base_path, f)) + ] + + assert len(gen_files) == len(base_files) + for f in base_files: + base = os.path.join(base_path, f) + gen = os.path.join(test_path, f) + if f != test["netcdf_output_file"]: + # "gwf_sto01.dis.ncf": # TODO wkt string missing on write? 
+ with open(base, "r") as file1, open(gen, "r") as file2: + # Skip first line + next(file1) + next(file2) + + for line1, line2 in zip(file1, file2): + assert line1.lower() == line2.lower() + else: + compare_netcdf(base, gen, update=update) + + +@pytest.mark.regression +def test_create_gwfsto01(function_tmpdir, example_data_path): + data_path_base = example_data_path / "mf6" / "netcdf" + tests = { + "test_gwf_sto01_mesh": { + "base_sim_dir": "gwf_sto01", + "netcdf_output_file": "gwf_sto01.in.nc", + "netcdf_type": "mesh2d", + }, + "test_gwf_sto01_structured": { + "base_sim_dir": "gwf_sto01", + "netcdf_output_file": "gwf_sto01.in.nc", + "netcdf_type": "structured", + }, + } + name = "gwf_sto01" + + # static model data + # temporal discretization + nper = 31 + perlen = [1.0] + [365.2500000 for _ in range(nper - 1)] + nstp = [1] + [6 for _ in range(nper - 1)] + tsmult = [1.0] + [1.3 for _ in range(nper - 1)] + tdis_rc = [] + for i in range(nper): + tdis_rc.append((perlen[i], nstp[i], tsmult[i])) + + # spatial discretization data + nlay, nrow, ncol = 3, 10, 10 + delr, delc = 1000.0, 2000.0 + botm = [-100, -150.0, -350.0] + strt = 0.0 + + # calculate hk + hk1fact = 1.0 / 50.0 + hk1 = np.ones((nrow, ncol), dtype=float) * 0.5 * hk1fact + hk1[0, :] = 1000.0 * hk1fact + hk1[-1, :] = 1000.0 * hk1fact + hk1[:, 0] = 1000.0 * hk1fact + hk1[:, -1] = 1000.0 * hk1fact + hk = [20.0, hk1, 5.0] + + # calculate vka + vka = [1e6, 7.5e-5, 1e6] + + # all cells are active and layer 1 is convertible + ib = 1 + laytyp = [1, 0, 0] + + # solver options + nouter, ninner = 500, 300 + hclose, rclose, relax = 1e-9, 1e-6, 1.0 + newtonoptions = "NEWTON" + imsla = "BICGSTAB" + + # chd data + c = [] + c6 = [] + ccol = [3, 4, 5, 6] + for j in ccol: + c.append([0, nrow - 1, j, strt, strt]) + c6.append([(0, nrow - 1, j), strt]) + cd = {0: c} + cd6 = {0: c6} + maxchd = len(cd[0]) + + # pumping well data + wr = [0, 0, 0, 0, 1, 1, 2, 2, 3, 3] + wc = [0, 1, 8, 9, 0, 9, 0, 9, 0, 0] + wrp = [2, 2, 3, 3] + wcp = [5, 6, 5, 6] + wq = [-14000.0, -8000.0, -5000.0, -3000.0] + d = [] + d6 = [] + for r, c, q in zip(wrp, wcp, wq): + d.append([2, r, c, q]) + d6.append([(2, r, c), q]) + wd = {1: d} + wd6 = {1: d6} + maxwel = len(wd[1]) + + # recharge data + q = 3000.0 / (delr * delc) + v = np.zeros((nrow, ncol), dtype=float) + for r, c in zip(wr, wc): + v[r, c] = q + rech = {0: v} + + # storage and compaction data + ske = [6e-4, 3e-4, 6e-4] + + # build + ws = function_tmpdir / "ws" + for idx, (dirname, test) in enumerate(tests.items()): + data_path = os.path.join(data_path_base, dirname, test["base_sim_dir"]) + + # copy example data into working directory + base_path = os.path.join(ws, f"{dirname}_base") + test_path = os.path.join(ws, f"{dirname}_test") + shutil.copytree(data_path, base_path) + + # build MODFLOW 6 files + sim = flopy.mf6.MFSimulation( + sim_name=name, version="mf6", exe_name="mf6", sim_ws=ws + ) + + # create tdis package + tdis = flopy.mf6.ModflowTdis( + sim, + time_units="DAYS", + start_date_time="2041-01-01t00:00:00-05:00", + nper=nper, + perioddata=tdis_rc, + ) + + # create gwf model + top = 0.0 + zthick = [top - botm[0], botm[0] - botm[1], botm[1] - botm[2]] + elevs = [top] + botm + + # create model + kwargs = {} + kwargs["crs"] = "EPSG:26918" + gwf = flopy.mf6.ModflowGwf( + sim, modelname=name, newtonoptions=newtonoptions, save_flows=True, **kwargs + ) + + # create iterative model solution and register the gwf model with it + ims = flopy.mf6.ModflowIms( + sim, + print_option="SUMMARY", + outer_dvclose=hclose, + 
outer_maximum=nouter, + under_relaxation="NONE", + inner_maximum=ninner, + inner_dvclose=hclose, + rcloserecord=rclose, + linear_acceleration=imsla, + scaling_method="NONE", + reordering_method="NONE", + relaxation_factor=relax, + ) + sim.register_ims_package(ims, [gwf.name]) + + # dis + dis = flopy.mf6.ModflowGwfdis( + gwf, + nlay=nlay, + nrow=nrow, + ncol=ncol, + delr=delr, + delc=delc, + top=top, + botm=botm, + filename=f"{name}.dis", + ) + + # initial conditions + ic = flopy.mf6.ModflowGwfic(gwf, strt=strt, filename=f"{name}.ic") + + # node property flow + npf = flopy.mf6.ModflowGwfnpf( + gwf, + save_flows=False, + icelltype=laytyp, + k=hk, + k33=vka, + ) + + # storage + sto = flopy.mf6.ModflowGwfsto( + gwf, + save_flows=False, + iconvert=laytyp, + ss=ske, + sy=0, + storagecoefficient=None, + steady_state={0: True}, + transient={1: True}, + ) + + # recharge + rch = flopy.mf6.ModflowGwfrcha(gwf, readasarrays=True, recharge=rech) + + # wel file + wel = flopy.mf6.ModflowGwfwel( + gwf, + print_input=True, + print_flows=True, + maxbound=maxwel, + stress_period_data=wd6, + save_flows=False, + ) + + # chd files + chd = flopy.mf6.modflow.mfgwfchd.ModflowGwfchd( + gwf, maxbound=maxchd, stress_period_data=cd6, save_flows=False + ) + + # output control + oc = flopy.mf6.ModflowGwfoc( + gwf, + budget_filerecord=f"{name}.cbc", + head_filerecord=f"{name}.hds", + headprintrecord=[("COLUMNS", 10, "WIDTH", 15, "DIGITS", 6, "GENERAL")], + saverecord=[("HEAD", "ALL"), ("BUDGET", "ALL")], + printrecord=[("HEAD", "LAST"), ("BUDGET", "ALL")], + ) + + # set simulation path and write simulation + sim.set_sim_path(test_path) + sim.write_simulation(netcdf=test["netcdf_type"]) + + # compare generated files + gen_files = [ + f + for f in os.listdir(test_path) + if os.path.isfile(os.path.join(test_path, f)) + ] + base_files = [ + f + for f in os.listdir(base_path) + if os.path.isfile(os.path.join(base_path, f)) + ] + + assert len(gen_files) == len(base_files) + for f in base_files: + base = os.path.join(base_path, f) + gen = os.path.join(test_path, f) + if f != test["netcdf_output_file"]: + with open(base, "r") as file1, open(gen, "r") as file2: + # Skip first line + next(file1) + next(file2) + + for line1, line2 in zip(file1, file2): + assert line1 == line2 + else: + compare_netcdf(base, gen) + + +@pytest.mark.regression +def test_gwfsto01(function_tmpdir, example_data_path): + data_path_base = example_data_path / "mf6" / "netcdf" + tests = { + "test_gwf_sto01_mesh": { + "base_sim_dir": "gwf_sto01", + "netcdf_output_file": "gwf_sto01.in.nc", + "netcdf_type": "mesh2d", + }, + "test_gwf_sto01_structured": { + "base_sim_dir": "gwf_sto01", + "netcdf_output_file": "gwf_sto01.in.nc", + "netcdf_type": "structured", + }, + } + + # spatial discretization data + nlay, nrow, ncol = 3, 10, 10 + delr = [1000.0] + delc = [2000.0] + top = np.full((nrow, ncol), 0.0) + botm = [] + botm.append(np.full((nrow, ncol), -100.0)) + botm.append(np.full((nrow, ncol), -150.0)) + botm.append(np.full((nrow, ncol), -350.0)) + botm = np.array(botm) + + # ic + strt = np.full((nlay, nrow, ncol), 0.0) + + # npf + # icelltype + ic1 = np.full((nrow, ncol), np.int32(1)) + ic2 = np.full((nrow, ncol), np.int32(0)) + ic3 = np.full((nrow, ncol), np.int32(0)) + icelltype = np.array([ic1, ic2, ic3]) + + # k + hk2fact = 1.0 / 50.0 + hk2 = np.ones((nrow, ncol), dtype=float) * 0.5 * hk2fact + hk2[0, :] = 1000.0 * hk2fact + hk2[-1, :] = 1000.0 * hk2fact + hk2[:, 0] = 1000.0 * hk2fact + hk2[:, -1] = 1000.0 * hk2fact + k1 = np.full((nrow, ncol), 20.0) + k3 = 
np.full((nrow, ncol), 5.0) + k = np.array([k1, hk2, k3]) + + # k33 + k33_1 = np.full((nrow, ncol), 1e6) + k33_2 = np.full((nrow, ncol), 7.5e-5) + k33_3 = np.full((nrow, ncol), 1e6) + k33 = np.array([k33_1, k33_2, k33_3]) + + # sto + iconvert = icelltype + + # storage and compaction data + ss1 = np.full((nrow, ncol), 6e-4) + ss2 = np.full((nrow, ncol), 3e-4) + ss3 = np.full((nrow, ncol), 6e-4) + ss = np.array([ss1, ss2, ss3]) + sy = np.full((nlay, nrow, ncol), 0.0) + + # define longnames + delr_longname = "spacing along a row" + delc_longname = "spacing along a column" + top_longname = "cell top elevation" + botm_longname = "cell bottom elevation" + icelltype_longname = "confined or convertible indicator" + k_longname = "hydraulic conductivity (L/T)" + k33_longname = "hydraulic conductivity of third ellipsoid axis (L/T)" + iconvert_longname = "convertible indicator" + ss_longname = "specific storage" + sy_longname = "specific yield" + strt_longname = "starting head" + + ws = function_tmpdir / "ws" + for dirname, test in tests.items(): + data_path = os.path.join(data_path_base, dirname, test["base_sim_dir"]) + + # copy example data into working directory + base_path = os.path.join(ws, f"{dirname}_base") + test_path = os.path.join(ws, f"{dirname}_test") + shutil.copytree(data_path, base_path) + os.mkdir(test_path) + + # create discretization + dis = flopy.discretization.StructuredGrid( + delc=np.array(delc * nrow), + delr=np.array(delr * ncol), + top=top, + botm=botm, + nlay=nlay, + nrow=nrow, + ncol=ncol, + crs="EPSG:26918", + ) + + # create the dataset + ds = create_dataset( + "gwf6", + "gwf_sto01", + test["netcdf_type"], + test["netcdf_output_file"], + dis, + ) + + # add dis arrays + ds.create_array("dis", "delc", dis.delc, ["nrow"], delc_longname) + ds.create_array("dis", "delr", dis.delr, ["ncol"], delr_longname) + ds.create_array("dis", "top", dis.top, ["nrow", "ncol"], top_longname) + ds.create_array( + "dis", "botm", dis.botm, ["nlay", "nrow", "ncol"], botm_longname + ) + + # add ic array + ds.create_array("ic", "strt", strt, ["nlay", "nrow", "ncol"], strt_longname) + + # add npf arrays + ds.create_array( + "npf", "icelltype", icelltype, ["nlay", "nrow", "ncol"], icelltype_longname + ) + ds.create_array("npf", "k", k, ["nlay", "nrow", "ncol"], k_longname) + ds.create_array("npf", "k33", k33, ["nlay", "nrow", "ncol"], k33_longname) + + # add sto arrays + ds.create_array( + "sto", "iconvert", iconvert, ["nlay", "nrow", "ncol"], iconvert_longname + ) + ds.create_array("sto", "ss", ss, ["nlay", "nrow", "ncol"], ss_longname) + ds.create_array("sto", "sy", sy, ["nlay", "nrow", "ncol"], sy_longname) + + # write to netcdf + ds.write(test_path) + + # compare + compare_netcdf( + os.path.join(base_path, test["netcdf_output_file"]), + os.path.join(test_path, test["netcdf_output_file"]), + projection=True, + ) + + +@pytest.mark.regression +def test_load_disv01b(function_tmpdir, example_data_path): + data_path_base = example_data_path / "mf6" / "netcdf" + tests = { + "test_gwf_disv01b": { + "base_sim_dir": "disv01b", + "netcdf_output_file": "disv01b.in.nc", + }, + } + ws = function_tmpdir / "ws" + for dirname, test in tests.items(): + data_path = os.path.join(data_path_base, dirname, test["base_sim_dir"]) + + # copy example data into working directory + base_path = os.path.join(ws, f"{dirname}_base") + test_path = os.path.join(ws, f"{dirname}_test") + shutil.copytree(data_path, base_path) + + # load example + sim = flopy.mf6.MFSimulation.load(sim_ws=base_path) + + # set simulation path and write 
simulation + sim.set_sim_path(test_path) + sim.write_simulation(netcdf="mesh2d") + + # compare generated files + gen_files = [ + f + for f in os.listdir(test_path) + if os.path.isfile(os.path.join(test_path, f)) + ] + base_files = [ + f + for f in os.listdir(base_path) + if os.path.isfile(os.path.join(base_path, f)) + ] + + assert len(gen_files) == len(base_files) + for f in base_files: + base = os.path.join(base_path, f) + gen = os.path.join(test_path, f) + if f != test["netcdf_output_file"]: + with open(base, "r") as file1, open(gen, "r") as file2: + # Skip first line + next(file1) + next(file2) + + for line1, line2 in zip(file1, file2): + assert line1.lower() == line2.lower() + else: + compare_netcdf(base, gen) + + +@pytest.mark.regression +def test_update_disv01b(function_tmpdir, example_data_path): + data_path_base = example_data_path / "mf6" / "netcdf" + tests = { + "test_gwf_disv01b": { + "base_sim_dir": "disv01b", + "netcdf_output_file": "disv01b.in.nc", + }, + } + + nlay, nrow, ncol = 3, 3, 3 + ncpl = nrow * ncol + strt = np.full((nlay, ncpl), 0.999) + + idomain = np.array( + [ + [1, 0, 0, 0, 1, 1, 1, 1, 1], + [1, 1, 1, 0, 0, 0, 1, 1, 1], + [1, 1, 1, 1, 1, 1, 0, 0, 0], + ] + ) + + botm = [ + [-15.0, -15.0, -15.0, -15.0, -15.0, -15.0, -15.0, -15.0, -15.0], + [-25.0, -25.0, -25.0, -25.0, -25.0, -25.0, -25.0, -25.0, -25.0], + [-35.0, -35.0, -35.0, -35.0, -35.0, -35.0, -35.0, -35.0, -35.0], + ] + + update = { + "disv_idomain_l1": idomain[0], + "disv_idomain_l2": idomain[1], + "disv_idomain_l3": idomain[2], + "disv_botm_l1": botm[0], + "disv_botm_l2": botm[1], + "disv_botm_l3": botm[2], + "ic_strt_l1": strt[0], + "ic_strt_l2": strt[1], + "ic_strt_l3": strt[2], + } + + ws = function_tmpdir / "ws" + for dirname, test in tests.items(): + data_path = os.path.join(data_path_base, dirname, test["base_sim_dir"]) + + # copy example data into working directory + base_path = os.path.join(ws, f"{dirname}_base") + test_path = os.path.join(ws, f"{dirname}_test") + shutil.copytree(data_path, base_path) + + # load example + sim = flopy.mf6.MFSimulation.load(sim_ws=base_path) + + # get model instance + gwf = sim.get_model("disv01b") + + # update disv idomain and botm + gwf.disv.idomain = idomain + gwf.disv.botm.set_data(botm) + + # update ic strt + gwf.ic.strt.set_data(strt) + + # set simulation path and write simulation + sim.set_sim_path(test_path) + sim.write_simulation(netcdf="mesh2d") + + # compare generated files + gen_files = [ + f + for f in os.listdir(test_path) + if os.path.isfile(os.path.join(test_path, f)) + ] + base_files = [ + f + for f in os.listdir(base_path) + if os.path.isfile(os.path.join(base_path, f)) + ] + + assert len(gen_files) == len(base_files) + for f in base_files: + base = os.path.join(base_path, f) + gen = os.path.join(test_path, f) + if f != test["netcdf_output_file"]: + with open(base, "r") as file1, open(gen, "r") as file2: + # Skip first line + next(file1) + next(file2) + + for line1, line2 in zip(file1, file2): + assert line1.lower() == line2.lower() + else: + compare_netcdf(base, gen, update=update) + + +@pytest.mark.regression +def test_create_disv01b(function_tmpdir, example_data_path): + data_path_base = example_data_path / "mf6" / "netcdf" + tests = { + "test_gwf_disv01b": { + "base_sim_dir": "disv01b", + "netcdf_output_file": "disv01b.in.nc", + "netcdf_type": "mesh2d", + }, + } + + name = "disv01b" + nlay = 3 + nrow = 3 + ncol = 3 + delr = 10.0 + delc = 10.0 + top = 0 + botm = [-10, -20, -30] + xoff = 100000000.0 + yoff = 100000000.0 + disvkwargs = 
get_disv_kwargs(nlay, nrow, ncol, delr, delc, top, botm, xoff, yoff) + idomain = np.ones((nlay, nrow * ncol), dtype=int) + idomain[0, 1] = 0 + disvkwargs["idomain"] = idomain + + # build + ws = function_tmpdir / "ws" + for idx, (dirname, test) in enumerate(tests.items()): + data_path = os.path.join(data_path_base, dirname, test["base_sim_dir"]) + + # copy example data into working directory + base_path = os.path.join(ws, f"{dirname}_base") + test_path = os.path.join(ws, f"{dirname}_test") + shutil.copytree(data_path, base_path) + + # create simulation + sim = flopy.mf6.MFSimulation( + sim_name=name, + version="mf6", + exe_name="mf6", + sim_ws=ws, + ) + tdis = flopy.mf6.ModflowTdis(sim, start_date_time="2041-01-01t00:00:00-05:00") + kwargs = {} + kwargs["crs"] = "EPSG:26918" + gwf = flopy.mf6.ModflowGwf(sim, modelname=name, **kwargs) + ims = flopy.mf6.ModflowIms(sim, print_option="SUMMARY") + disv = flopy.mf6.ModflowGwfdisv(gwf, **disvkwargs) + ic = flopy.mf6.ModflowGwfic(gwf, strt=0.0) + npf = flopy.mf6.ModflowGwfnpf(gwf) + spd = {0: [[(0, 0), 1.0], [(0, nrow * ncol - 1), 0.0]]} + chd = flopy.mf6.modflow.mfgwfchd.ModflowGwfchd(gwf, stress_period_data=spd) + oc = flopy.mf6.ModflowGwfoc( + gwf, + head_filerecord=f"{name}.hds", + saverecord=[("HEAD", "ALL")], + ) + + # set path and write simulation + sim.set_sim_path(test_path) + sim.write_simulation(netcdf=test["netcdf_type"]) + + # compare generated files + gen_files = [ + f + for f in os.listdir(test_path) + if os.path.isfile(os.path.join(test_path, f)) + ] + base_files = [ + f + for f in os.listdir(base_path) + if os.path.isfile(os.path.join(base_path, f)) + ] + + assert len(gen_files) == len(base_files) + for f in base_files: + base = os.path.join(base_path, f) + gen = os.path.join(test_path, f) + if f != test["netcdf_output_file"]: + with open(base, "r") as file1, open(gen, "r") as file2: + # Skip first line + next(file1) + next(file2) + + for line1, line2 in zip(file1, file2): + assert line1 == line2 + else: + compare_netcdf(base, gen) + + +@pytest.mark.regression +def test_disv01b(function_tmpdir, example_data_path): + data_path_base = example_data_path / "mf6" / "netcdf" + tests = { + "test_gwf_disv01b": { + "base_sim_dir": "disv01b", + "netcdf_output_file": "disv01b.in.nc", + "netcdf_type": "mesh2d", + }, + } + + nlay, nrow, ncol = 3, 3, 3 + ncpl = nrow * ncol + + vertices = [ + (0, 1.0000000e08, 1.0000003e08), + (1, 1.0000001e08, 1.0000003e08), + (2, 1.0000002e08, 1.0000003e08), + (3, 1.0000003e08, 1.0000003e08), + (4, 1.0000000e08, 1.0000002e08), + (5, 1.0000001e08, 1.0000002e08), + (6, 1.0000002e08, 1.0000002e08), + (7, 1.0000003e08, 1.0000002e08), + (8, 1.0000000e08, 1.0000001e08), + (9, 1.0000001e08, 1.0000001e08), + (10, 1.0000002e08, 1.0000001e08), + (11, 1.0000003e08, 1.0000001e08), + (12, 1.0000000e08, 1.0000000e08), + (13, 1.0000001e08, 1.0000000e08), + (14, 1.0000002e08, 1.0000000e08), + (15, 1.0000003e08, 1.0000000e08), + ] + + cell2d = [ + (0, 1.00000005e08, 1.00000025e08, 4, 0, 1, 5, 4), + (1, 1.00000015e08, 1.00000025e08, 4, 1, 2, 6, 5), + (2, 1.00000025e08, 1.00000025e08, 4, 2, 3, 7, 6), + (3, 1.00000005e08, 1.00000015e08, 4, 4, 5, 9, 8), + (4, 1.00000015e08, 1.00000015e08, 4, 5, 6, 10, 9), + (5, 1.00000025e08, 1.00000015e08, 4, 6, 7, 11, 10), + (6, 1.00000005e08, 1.00000005e08, 4, 8, 9, 13, 12), + (7, 1.00000015e08, 1.00000005e08, 4, 9, 10, 14, 13), + (8, 1.00000025e08, 1.00000005e08, 4, 10, 11, 15, 14), + ] + + top = np.array(np.full((ncpl), 0.0)) + + idomain = np.array( + [ + [1, 0, 1, 1, 1, 1, 1, 1, 1], + [1, 1, 
1, 1, 1, 1, 1, 1, 1], + [1, 1, 1, 1, 1, 1, 1, 1, 1], + ], + dtype=np.int32, + ) + + botm = [] + botm.append(np.full((ncpl), -10.0)) + botm.append(np.full((ncpl), -20.0)) + botm.append(np.full((ncpl), -30.0)) + botm = np.array(botm) + + # npf + icelltype = np.full((nlay, ncpl), np.int32(0)) + k = np.full((nlay, ncpl), 1.0) + + # ic + strt = np.full((nlay, ncpl), 0.0) + + # define longnames + top_longname = "model top elevation" + botm_longname = "model bottom elevation" + idomain_longname = "idomain existence array" + icelltype_longname = "confined or convertible indicator" + k_longname = "hydraulic conductivity (L/T)" + strt_longname = "starting head" + + ws = function_tmpdir / "ws" + for dirname, test in tests.items(): + data_path = os.path.join(data_path_base, dirname, test["base_sim_dir"]) + + # copy example data into working directory + base_path = os.path.join(ws, f"{dirname}_base") + test_path = os.path.join(ws, f"{dirname}_test") + shutil.copytree(data_path, base_path) + os.mkdir(test_path) + + # create discretization + disv = VertexGrid( + vertices=vertices, + cell2d=cell2d, + top=top, + idomain=idomain, + botm=botm, + nlay=nlay, + ncpl=ncpl, + crs="EPSG:26918", + ) + + # create dataset + ds = create_dataset( + "gwf6", + "disv01b", + test["netcdf_type"], + test["netcdf_output_file"], + disv, + ) + + # add dis arrays + ds.create_array("disv", "top", disv.top, ["ncpl"], top_longname) + ds.create_array("disv", "botm", disv.botm, ["nlay", "ncpl"], botm_longname) + ds.create_array( + "disv", "idomain", disv.idomain, ["nlay", "ncpl"], idomain_longname + ) + + # add npf arrays + ds.create_array( + "npf", "icelltype", icelltype, ["nlay", "ncpl"], icelltype_longname + ) + ds.create_array("npf", "k", k, ["nlay", "ncpl"], k_longname) + + # add ic arrays + ds.create_array("ic", "strt", strt, ["nlay", "ncpl"], strt_longname) + + # write to netcdf + ds.write(test_path) + + # compare + compare_netcdf( + os.path.join(base_path, test["netcdf_output_file"]), + os.path.join(test_path, test["netcdf_output_file"]), + projection=True, + ) + + +@pytest.mark.regression +def test_dis_transform(function_tmpdir, example_data_path): + transform_ws = function_tmpdir + cmp_pth = transform_ws / "compare" + nc_types = ["mesh2d", "structured"] + data_path_base = example_data_path / "mf6" / "netcdf" / "test_dis_transform" + shutil.copytree(data_path_base, cmp_pth) + + # define transform / projection info + kwargs = {} + kwargs["crs"] = "EPSG:31370" + kwargs["xll"] = 199000 + kwargs["yll"] = 215500 + kwargs["rotation"] = 30 + + # create simulation + nlay, nrow, ncol = 3, 10, 15 + sim = flopy.mf6.MFSimulation(sim_ws=transform_ws, sim_name="transform") + flopy.mf6.ModflowTdis(sim) + flopy.mf6.ModflowIms(sim, complexity="simple") + + gwf = flopy.mf6.ModflowGwf(sim, modelname="transform", print_input=True, **kwargs) + flopy.mf6.ModflowGwfdis( + gwf, + nlay=nlay, + nrow=nrow, + ncol=ncol, + delr=1.0, + delc=1.0, + top=10.0, + botm=[0.0, -10.0, -30.0], + xorigin=199000, + yorigin=215500, + angrot=30, + ) + flopy.mf6.ModflowGwfnpf( + gwf, + icelltype=[1, 1, 1], + ) + strt = np.array([np.linspace(-0.999, 0.999, nlay * nrow * ncol)]) + flopy.mf6.ModflowGwfic( + gwf, + # strt=strt, + strt=10.0, + ) + data = {0: [[(0, 0, 0), 1.0000000], [(1, 0, 14), 0.0000000]]} + flopy.mf6.ModflowGwfchd( + gwf, + stress_period_data=data, + ) + + for t in nc_types: + ws = transform_ws / t + sim.set_sim_path(ws) + sim.write_simulation(netcdf=t) + compare_netcdf( + cmp_pth / f"transform.{t}.nc", ws / "transform.in.nc", projection=True + ) + + 
+@requires_exe("triangle") +@pytest.mark.regression +def test_disv_transform(function_tmpdir, example_data_path): + # create triangular grid + from flopy.utils.triangle import Triangle + + transform_ws = function_tmpdir + cmp_pth = transform_ws / "compare" + data_path_base = example_data_path / "mf6" / "netcdf" / "test_disv_transform" + shutil.copytree(data_path_base, cmp_pth) + + nc_type = "mesh2d" + + triangle_ws = transform_ws / "triangle" + triangle_ws.mkdir() + + active_area = [(0, 0), (0, 1000), (1000, 1000), (1000, 0)] + tri = Triangle(model_ws=triangle_ws, angle=30) + tri.add_polygon(active_area) + tri.add_region((1, 1), maximum_area=50**2) + + tri.build() + + # strt array + strt = np.array([np.linspace(-0.999, 0.999, len(tri.get_cell2d()))]) + + ### + # vertex discretization based run + vertex_ws = triangle_ws / "vertex" + os.mkdir(vertex_ws) + + # build vertex grid object + vgrid = flopy.discretization.VertexGrid( + vertices=tri.get_vertices(), + cell2d=tri.get_cell2d(), + xoff=199000, + yoff=215500, + crs=31370, + angrot=30, + ) + + ds = create_dataset( + "example", # model type + "trimodel", # model name + nc_type, # netcdf file type + "tri.nc", # netcdf file name + vgrid, + ) + + ds.create_array("start_conditions", "head", strt, ["nlay", "ncpl"], None) + + # write to netcdf + ds.write(vertex_ws) + + ### + # MODFLOW 6 sim based run + mf6_ws = triangle_ws / "mf6" + sim = flopy.mf6.MFSimulation( + sim_name="tri_disv", + version="mf6", + exe_name="mf6", + sim_ws=mf6_ws, + ) + tdis = flopy.mf6.ModflowTdis(sim, start_date_time="2041-01-01t00:00:00-05:00") + + # set projection and transform info + kwargs = {} + kwargs["crs"] = "EPSG:31370" + kwargs["xll"] = 199000 + kwargs["yll"] = 215500 + kwargs["rotation"] = 30 + + gwf = flopy.mf6.ModflowGwf(sim, modelname="tri", **kwargs) + ims = flopy.mf6.ModflowIms(sim, print_option="SUMMARY") + disv = flopy.mf6.ModflowGwfdisv( + gwf, + nlay=1, + ncpl=tri.ncpl, + nvert=tri.nvert, + vertices=tri.get_vertices(), + cell2d=tri.get_cell2d(), + top=0, + botm=-150, + ) + ic = flopy.mf6.ModflowGwfic(gwf, strt=strt) + npf = flopy.mf6.ModflowGwfnpf(gwf) + data = {0: [[(0, 0), 1.0000000], [(0, 14), 0.0000000]]} + chd = flopy.mf6.modflow.mfgwfchd.ModflowGwfchd(gwf, stress_period_data=data) + + sim.write_simulation(netcdf=nc_type) + + compare_netcdf(cmp_pth / f"tri.{nc_type}.nc", mf6_ws / "tri.in.nc", projection=True) + + +@pytest.mark.regression +def test_utlncf_load(function_tmpdir, example_data_path): + data_path_base = example_data_path / "mf6" / "netcdf" + tests = { + "test_utlncf_load": { + "base_sim_dir": "disv01b", + "netcdf_output_file": "disv01b.in.nc", + }, + } + ws = function_tmpdir / "ws" + for dirname, test in tests.items(): + data_path = os.path.join(data_path_base, dirname, test["base_sim_dir"]) + + # copy example data into working directory + base_path = os.path.join(ws, f"{dirname}_base") + test_path = os.path.join(ws, f"{dirname}_test") + shutil.copytree(data_path, base_path) + + # load example + sim = flopy.mf6.MFSimulation.load(sim_ws=base_path) + + # set simulation path and write simulation + sim.set_sim_path(test_path) + sim.write_simulation(netcdf="mesh2d") + + # compare generated files + gen_files = [ + f + for f in os.listdir(test_path) + if os.path.isfile(os.path.join(test_path, f)) + ] + base_files = [ + f + for f in os.listdir(base_path) + if os.path.isfile(os.path.join(base_path, f)) + ] + + assert len(gen_files) == len(base_files) + for f in base_files: + base = os.path.join(base_path, f) + gen = os.path.join(test_path, f) + if 
f != test["netcdf_output_file"]: + with open(base, "r") as file1, open(gen, "r") as file2: + # Skip first line + next(file1) + next(file2) + + for line1, line2 in zip(file1, file2): + if line1.lower().startswith(" wkt"): + break + assert line1.lower() == line2.lower() + else: + compare_netcdf_data(base, gen) + + +@pytest.mark.regression +def test_utlncf_create(function_tmpdir, example_data_path): + data_path_base = example_data_path / "mf6" / "netcdf" + tests = { + "test_utlncf_create": { + "base_sim_dir": "disv01b", + "netcdf_output_file": "disv01b.in.nc", + "netcdf_type": "mesh2d", + }, + } + + name = "disv01b" + nlay = 3 + nrow = 3 + ncol = 3 + delr = 10.0 + delc = 10.0 + top = 0 + botm = [-10, -20, -30] + xoff = 100000000.0 + yoff = 100000000.0 + disvkwargs = get_disv_kwargs(nlay, nrow, ncol, delr, delc, top, botm, xoff, yoff) + idomain = np.ones((nlay, nrow * ncol), dtype=int) + idomain[0, 1] = 0 + disvkwargs["idomain"] = idomain + + ws = function_tmpdir / "ws" + for dirname, test in tests.items(): + data_path = os.path.join(data_path_base, dirname, test["base_sim_dir"]) + + # copy example data into working directory + base_path = os.path.join(ws, f"{dirname}_base") + test_path = os.path.join(ws, f"{dirname}_test") + shutil.copytree(data_path, base_path) + + # create example + sim = flopy.mf6.MFSimulation( + sim_name=name, + version="mf6", + exe_name="mf6", + sim_ws=ws, + ) + tdis = flopy.mf6.ModflowTdis(sim, start_date_time="2041-01-01t00:00:00-05:00") + kwargs = {} + kwargs["crs"] = "EPSG:26918" + gwf = flopy.mf6.ModflowGwf(sim, modelname=name, **kwargs) + ims = flopy.mf6.ModflowIms(sim, print_option="SUMMARY") + disv = flopy.mf6.ModflowGwfdisv(gwf, **disvkwargs) + ncf = flopy.mf6.ModflowUtlncf( + disv, + deflate=9, + shuffle=True, + chunk_time=1, + chunk_face=3, + filename=f"{name}.disv.ncf", + ) + ic = flopy.mf6.ModflowGwfic(gwf, strt=0.0) + npf = flopy.mf6.ModflowGwfnpf(gwf) + spd = {0: [[(0, 0), 1.0], [(0, nrow * ncol - 1), 0.0]]} + chd = flopy.mf6.modflow.mfgwfchd.ModflowGwfchd(gwf, stress_period_data=spd) + oc = flopy.mf6.ModflowGwfoc( + gwf, + head_filerecord=f"{name}.hds", + saverecord=[("HEAD", "ALL")], + ) + + # set path and write simulation + sim.set_sim_path(test_path) + sim.write_simulation(netcdf=test["netcdf_type"]) + + # set simulation path and write simulation + sim.set_sim_path(test_path) + sim.write_simulation(netcdf="mesh2d") + + # compare generated files + compare_path = os.path.join(base_path, "compare") + base = os.path.join(compare_path, "disv01b.in.nc") + gen = os.path.join(test_path, test["netcdf_output_file"]) + compare_netcdf_data(base, gen) diff --git a/etc/environment.yml b/etc/environment.yml index 4c42175c90..138c98d16b 100644 --- a/etc/environment.yml +++ b/etc/environment.yml @@ -61,5 +61,6 @@ dependencies: - scipy - shapely>=2.0 - vtk + - xarray - xmipy - h5py diff --git a/examples/data/mf6/netcdf/test_dis_transform/transform.mesh2d.nc b/examples/data/mf6/netcdf/test_dis_transform/transform.mesh2d.nc new file mode 100644 index 0000000000..4a35df6c87 Binary files /dev/null and b/examples/data/mf6/netcdf/test_dis_transform/transform.mesh2d.nc differ diff --git a/examples/data/mf6/netcdf/test_dis_transform/transform.structured.nc b/examples/data/mf6/netcdf/test_dis_transform/transform.structured.nc new file mode 100644 index 0000000000..85e6749a1a Binary files /dev/null and b/examples/data/mf6/netcdf/test_dis_transform/transform.structured.nc differ diff --git a/examples/data/mf6/netcdf/test_disv_transform/tri.mesh2d.nc 
b/examples/data/mf6/netcdf/test_disv_transform/tri.mesh2d.nc new file mode 100644 index 0000000000..7a2ecaa6dd Binary files /dev/null and b/examples/data/mf6/netcdf/test_disv_transform/tri.mesh2d.nc differ diff --git a/examples/data/mf6/netcdf/test_gwf_disv01b/disv01b/disv01b.chd b/examples/data/mf6/netcdf/test_gwf_disv01b/disv01b/disv01b.chd new file mode 100644 index 0000000000..c2ca41a58f --- /dev/null +++ b/examples/data/mf6/netcdf/test_gwf_disv01b/disv01b/disv01b.chd @@ -0,0 +1,13 @@ +# File generated by Flopy version 3.10.0.dev1 on 01/14/2025 at 13:13:00. +BEGIN options +END options + +BEGIN dimensions + MAXBOUND 2 +END dimensions + +BEGIN period 1 + 1 1 1.00000000E+00 + 1 9 0.00000000E+00 +END period 1 + diff --git a/examples/data/mf6/netcdf/test_gwf_disv01b/disv01b/disv01b.disv b/examples/data/mf6/netcdf/test_gwf_disv01b/disv01b/disv01b.disv new file mode 100644 index 0000000000..584563b5d3 --- /dev/null +++ b/examples/data/mf6/netcdf/test_gwf_disv01b/disv01b/disv01b.disv @@ -0,0 +1,47 @@ +# NOT generated by Flopy +BEGIN options +END options + +BEGIN dimensions + NLAY 3 + NCPL 9 + NVERT 16 +END dimensions + +BEGIN griddata + top NETCDF + botm NETCDF + idomain NETCDF +END griddata + +BEGIN vertices + 1 1.00000000E+08 1.00000030E+08 + 2 1.00000010E+08 1.00000030E+08 + 3 1.00000020E+08 1.00000030E+08 + 4 1.00000030E+08 1.00000030E+08 + 5 1.00000000E+08 1.00000020E+08 + 6 1.00000010E+08 1.00000020E+08 + 7 1.00000020E+08 1.00000020E+08 + 8 1.00000030E+08 1.00000020E+08 + 9 1.00000000E+08 1.00000010E+08 + 10 1.00000010E+08 1.00000010E+08 + 11 1.00000020E+08 1.00000010E+08 + 12 1.00000030E+08 1.00000010E+08 + 13 1.00000000E+08 1.00000000E+08 + 14 1.00000010E+08 1.00000000E+08 + 15 1.00000020E+08 1.00000000E+08 + 16 1.00000030E+08 1.00000000E+08 +END vertices + +BEGIN cell2d + 1 1.00000005E+08 1.00000025E+08 4 1 2 6 5 + 2 1.00000015E+08 1.00000025E+08 4 2 3 7 6 + 3 1.00000025E+08 1.00000025E+08 4 3 4 8 7 + 4 1.00000005E+08 1.00000015E+08 4 5 6 10 9 + 5 1.00000015E+08 1.00000015E+08 4 6 7 11 10 + 6 1.00000025E+08 1.00000015E+08 4 7 8 12 11 + 7 1.00000005E+08 1.00000005E+08 4 9 10 14 13 + 8 1.00000015E+08 1.00000005E+08 4 10 11 15 14 + 9 1.00000025E+08 1.00000005E+08 4 11 12 16 15 +END cell2d + diff --git a/examples/data/mf6/netcdf/test_gwf_disv01b/disv01b/disv01b.ic b/examples/data/mf6/netcdf/test_gwf_disv01b/disv01b/disv01b.ic new file mode 100644 index 0000000000..b5a590efc1 --- /dev/null +++ b/examples/data/mf6/netcdf/test_gwf_disv01b/disv01b/disv01b.ic @@ -0,0 +1,7 @@ +# NOT generated by Flopy +BEGIN options +END options + +BEGIN griddata + strt NETCDF +END griddata diff --git a/examples/data/mf6/netcdf/test_gwf_disv01b/disv01b/disv01b.ims b/examples/data/mf6/netcdf/test_gwf_disv01b/disv01b/disv01b.ims new file mode 100644 index 0000000000..2ed531a581 --- /dev/null +++ b/examples/data/mf6/netcdf/test_gwf_disv01b/disv01b/disv01b.ims @@ -0,0 +1,5 @@ +# File generated by Flopy version 3.10.0.dev1 on 01/14/2025 at 13:12:59. 
+BEGIN options + PRINT_OPTION summary +END options + diff --git a/examples/data/mf6/netcdf/test_gwf_disv01b/disv01b/disv01b.in.nc b/examples/data/mf6/netcdf/test_gwf_disv01b/disv01b/disv01b.in.nc new file mode 100644 index 0000000000..1579075ff2 Binary files /dev/null and b/examples/data/mf6/netcdf/test_gwf_disv01b/disv01b/disv01b.in.nc differ diff --git a/examples/data/mf6/netcdf/test_gwf_disv01b/disv01b/disv01b.nam b/examples/data/mf6/netcdf/test_gwf_disv01b/disv01b/disv01b.nam new file mode 100644 index 0000000000..456a735e7a --- /dev/null +++ b/examples/data/mf6/netcdf/test_gwf_disv01b/disv01b/disv01b.nam @@ -0,0 +1,12 @@ +# NOT generated by Flopy +BEGIN options + NETCDF FILEIN disv01b.in.nc +END options + +BEGIN packages + DISV6 disv01b.disv disv + IC6 disv01b.ic ic + NPF6 disv01b.npf npf + CHD6 disv01b.chd chd_0 + OC6 disv01b.oc oc +END packages diff --git a/examples/data/mf6/netcdf/test_gwf_disv01b/disv01b/disv01b.npf b/examples/data/mf6/netcdf/test_gwf_disv01b/disv01b/disv01b.npf new file mode 100644 index 0000000000..7969e38aa9 --- /dev/null +++ b/examples/data/mf6/netcdf/test_gwf_disv01b/disv01b/disv01b.npf @@ -0,0 +1,8 @@ +# NOT generated by Flopy +BEGIN options +END options + +BEGIN griddata + icelltype NETCDF + k NETCDF +END griddata diff --git a/examples/data/mf6/netcdf/test_gwf_disv01b/disv01b/disv01b.oc b/examples/data/mf6/netcdf/test_gwf_disv01b/disv01b/disv01b.oc new file mode 100644 index 0000000000..e3b1201a8d --- /dev/null +++ b/examples/data/mf6/netcdf/test_gwf_disv01b/disv01b/disv01b.oc @@ -0,0 +1,9 @@ +# File generated by Flopy version 3.10.0.dev1 on 01/14/2025 at 13:13:00. +BEGIN options + HEAD FILEOUT disv01b.hds +END options + +BEGIN period 1 + SAVE HEAD ALL +END period 1 + diff --git a/examples/data/mf6/netcdf/test_gwf_disv01b/disv01b/disv01b.tdis b/examples/data/mf6/netcdf/test_gwf_disv01b/disv01b/disv01b.tdis new file mode 100644 index 0000000000..e59a76bd13 --- /dev/null +++ b/examples/data/mf6/netcdf/test_gwf_disv01b/disv01b/disv01b.tdis @@ -0,0 +1,13 @@ +# File generated by Flopy version 3.10.0.dev1 on 01/14/2025 at 13:12:59. +BEGIN options + START_DATE_TIME 2041-01-01t00:00:00-05:00 +END options + +BEGIN dimensions + NPER 1 +END dimensions + +BEGIN perioddata + 1.00000000 1 1.00000000 +END perioddata + diff --git a/examples/data/mf6/netcdf/test_gwf_disv01b/disv01b/mfsim.nam b/examples/data/mf6/netcdf/test_gwf_disv01b/disv01b/mfsim.nam new file mode 100644 index 0000000000..3ed060c872 --- /dev/null +++ b/examples/data/mf6/netcdf/test_gwf_disv01b/disv01b/mfsim.nam @@ -0,0 +1,19 @@ +# File generated by Flopy version 3.10.0.dev1 on 01/14/2025 at 13:12:59. +BEGIN options +END options + +BEGIN timing + TDIS6 disv01b.tdis +END timing + +BEGIN models + gwf6 disv01b.nam disv01b +END models + +BEGIN exchanges +END exchanges + +BEGIN solutiongroup 1 + ims6 disv01b.ims disv01b +END solutiongroup 1 + diff --git a/examples/data/mf6/netcdf/test_gwf_sto01_mesh/gwf_sto01/gwf_sto01.chd b/examples/data/mf6/netcdf/test_gwf_sto01_mesh/gwf_sto01/gwf_sto01.chd new file mode 100644 index 0000000000..c25bd8a655 --- /dev/null +++ b/examples/data/mf6/netcdf/test_gwf_sto01_mesh/gwf_sto01/gwf_sto01.chd @@ -0,0 +1,15 @@ +# File generated by Flopy version 3.10.0.dev1 on 01/06/2025 at 14:17:11. 
+BEGIN options +END options + +BEGIN dimensions + MAXBOUND 4 +END dimensions + +BEGIN period 1 + 1 10 4 0.00000000E+00 + 1 10 5 0.00000000E+00 + 1 10 6 0.00000000E+00 + 1 10 7 0.00000000E+00 +END period 1 + diff --git a/examples/data/mf6/netcdf/test_gwf_sto01_mesh/gwf_sto01/gwf_sto01.dis b/examples/data/mf6/netcdf/test_gwf_sto01_mesh/gwf_sto01/gwf_sto01.dis new file mode 100644 index 0000000000..89f0cd9758 --- /dev/null +++ b/examples/data/mf6/netcdf/test_gwf_sto01_mesh/gwf_sto01/gwf_sto01.dis @@ -0,0 +1,17 @@ +# NOT generated by Flopy +BEGIN options +END options + +BEGIN dimensions + NLAY 3 + NROW 10 + NCOL 10 +END dimensions + +BEGIN griddata + delr NETCDF + delc NETCDF + top NETCDF + botm NETCDF +END griddata + diff --git a/examples/data/mf6/netcdf/test_gwf_sto01_mesh/gwf_sto01/gwf_sto01.ic b/examples/data/mf6/netcdf/test_gwf_sto01_mesh/gwf_sto01/gwf_sto01.ic new file mode 100644 index 0000000000..b5a590efc1 --- /dev/null +++ b/examples/data/mf6/netcdf/test_gwf_sto01_mesh/gwf_sto01/gwf_sto01.ic @@ -0,0 +1,7 @@ +# NOT generated by Flopy +BEGIN options +END options + +BEGIN griddata + strt NETCDF +END griddata diff --git a/examples/data/mf6/netcdf/test_gwf_sto01_mesh/gwf_sto01/gwf_sto01.ims b/examples/data/mf6/netcdf/test_gwf_sto01_mesh/gwf_sto01/gwf_sto01.ims new file mode 100644 index 0000000000..bc0d875743 --- /dev/null +++ b/examples/data/mf6/netcdf/test_gwf_sto01_mesh/gwf_sto01/gwf_sto01.ims @@ -0,0 +1,21 @@ +# File generated by Flopy version 3.10.0.dev1 on 01/06/2025 at 14:17:11. +BEGIN options + PRINT_OPTION summary +END options + +BEGIN nonlinear + OUTER_DVCLOSE 1.00000000E-09 + OUTER_MAXIMUM 500 + UNDER_RELAXATION none +END nonlinear + +BEGIN linear + INNER_MAXIMUM 300 + INNER_DVCLOSE 1.00000000E-09 + inner_rclose 1.00000000E-06 + LINEAR_ACCELERATION bicgstab + RELAXATION_FACTOR 1.00000000 + SCALING_METHOD none + REORDERING_METHOD none +END linear + diff --git a/examples/data/mf6/netcdf/test_gwf_sto01_mesh/gwf_sto01/gwf_sto01.in.nc b/examples/data/mf6/netcdf/test_gwf_sto01_mesh/gwf_sto01/gwf_sto01.in.nc new file mode 100644 index 0000000000..54520330b2 Binary files /dev/null and b/examples/data/mf6/netcdf/test_gwf_sto01_mesh/gwf_sto01/gwf_sto01.in.nc differ diff --git a/examples/data/mf6/netcdf/test_gwf_sto01_mesh/gwf_sto01/gwf_sto01.nam b/examples/data/mf6/netcdf/test_gwf_sto01_mesh/gwf_sto01/gwf_sto01.nam new file mode 100644 index 0000000000..dce4fcaa57 --- /dev/null +++ b/examples/data/mf6/netcdf/test_gwf_sto01_mesh/gwf_sto01/gwf_sto01.nam @@ -0,0 +1,17 @@ +# NOT generated by Flopy +BEGIN options + SAVE_FLOWS + NEWTON + NETCDF FILEIN gwf_sto01.in.nc +END options + +BEGIN packages + DIS6 gwf_sto01.dis dis + IC6 gwf_sto01.ic ic + NPF6 gwf_sto01.npf npf + STO6 gwf_sto01.sto sto + RCH6 gwf_sto01.rcha rcha_0 + WEL6 gwf_sto01.wel wel_0 + CHD6 gwf_sto01.chd chd_0 + OC6 gwf_sto01.oc oc +END packages diff --git a/examples/data/mf6/netcdf/test_gwf_sto01_mesh/gwf_sto01/gwf_sto01.npf b/examples/data/mf6/netcdf/test_gwf_sto01_mesh/gwf_sto01/gwf_sto01.npf new file mode 100644 index 0000000000..26864aa6da --- /dev/null +++ b/examples/data/mf6/netcdf/test_gwf_sto01_mesh/gwf_sto01/gwf_sto01.npf @@ -0,0 +1,9 @@ +# NOT generated by Flopy +BEGIN options +END options + +BEGIN griddata + icelltype NETCDF + k NETCDF + k33 NETCDF +END griddata diff --git a/examples/data/mf6/netcdf/test_gwf_sto01_mesh/gwf_sto01/gwf_sto01.oc b/examples/data/mf6/netcdf/test_gwf_sto01_mesh/gwf_sto01/gwf_sto01.oc new file mode 100644 index 0000000000..3041e7820b --- /dev/null +++ 
b/examples/data/mf6/netcdf/test_gwf_sto01_mesh/gwf_sto01/gwf_sto01.oc @@ -0,0 +1,14 @@ +# File generated by Flopy version 3.10.0.dev1 on 01/06/2025 at 14:17:11. +BEGIN options + BUDGET FILEOUT gwf_sto01.cbc + HEAD FILEOUT gwf_sto01.hds + HEAD PRINT_FORMAT COLUMNS 10 WIDTH 15 DIGITS 6 GENERAL +END options + +BEGIN period 1 + SAVE HEAD ALL + SAVE BUDGET ALL + PRINT HEAD LAST + PRINT BUDGET ALL +END period 1 + diff --git a/examples/data/mf6/netcdf/test_gwf_sto01_mesh/gwf_sto01/gwf_sto01.rcha b/examples/data/mf6/netcdf/test_gwf_sto01_mesh/gwf_sto01/gwf_sto01.rcha new file mode 100644 index 0000000000..cb8d68d902 --- /dev/null +++ b/examples/data/mf6/netcdf/test_gwf_sto01_mesh/gwf_sto01/gwf_sto01.rcha @@ -0,0 +1,20 @@ +# File generated by Flopy version 3.10.0.dev1 on 01/06/2025 at 14:17:11. +BEGIN options + READASARRAYS +END options + +BEGIN period 1 + recharge + INTERNAL FACTOR 1.0 + 0.00150000 0.00150000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00150000 0.00150000 + 0.00150000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00150000 + 0.00150000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00150000 + 0.00150000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 + 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 + 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 + 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 + 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 + 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 + 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 +END period 1 + diff --git a/examples/data/mf6/netcdf/test_gwf_sto01_mesh/gwf_sto01/gwf_sto01.sto b/examples/data/mf6/netcdf/test_gwf_sto01_mesh/gwf_sto01/gwf_sto01.sto new file mode 100644 index 0000000000..008831583d --- /dev/null +++ b/examples/data/mf6/netcdf/test_gwf_sto01_mesh/gwf_sto01/gwf_sto01.sto @@ -0,0 +1,17 @@ +# NOT generated by Flopy +BEGIN options +END options + +BEGIN griddata + iconvert NETCDF + ss NETCDF + sy NETCDF +END griddata + +BEGIN period 1 + STEADY-STATE +END period 1 + +BEGIN period 2 + TRANSIENT +END period 2 diff --git a/examples/data/mf6/netcdf/test_gwf_sto01_mesh/gwf_sto01/gwf_sto01.tdis b/examples/data/mf6/netcdf/test_gwf_sto01_mesh/gwf_sto01/gwf_sto01.tdis new file mode 100644 index 0000000000..c3b6324fcb --- /dev/null +++ b/examples/data/mf6/netcdf/test_gwf_sto01_mesh/gwf_sto01/gwf_sto01.tdis @@ -0,0 +1,44 @@ +# File generated by Flopy version 3.10.0.dev1 on 01/06/2025 at 14:17:11. 
+BEGIN options + TIME_UNITS days + START_DATE_TIME 2041-01-01t00:00:00-05:00 +END options + +BEGIN dimensions + NPER 31 +END dimensions + +BEGIN perioddata + 1.00000000 1 1.00000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 +END perioddata + diff --git a/examples/data/mf6/netcdf/test_gwf_sto01_mesh/gwf_sto01/gwf_sto01.wel b/examples/data/mf6/netcdf/test_gwf_sto01_mesh/gwf_sto01/gwf_sto01.wel new file mode 100644 index 0000000000..aca574d236 --- /dev/null +++ b/examples/data/mf6/netcdf/test_gwf_sto01_mesh/gwf_sto01/gwf_sto01.wel @@ -0,0 +1,17 @@ +# File generated by Flopy version 3.10.0.dev1 on 01/06/2025 at 14:17:11. +BEGIN options + PRINT_INPUT + PRINT_FLOWS +END options + +BEGIN dimensions + MAXBOUND 4 +END dimensions + +BEGIN period 2 + 3 3 6 -1.40000000E+04 + 3 3 7 -8.00000000E+03 + 3 4 6 -5.00000000E+03 + 3 4 7 -3.00000000E+03 +END period 2 + diff --git a/examples/data/mf6/netcdf/test_gwf_sto01_mesh/gwf_sto01/mfsim.nam b/examples/data/mf6/netcdf/test_gwf_sto01_mesh/gwf_sto01/mfsim.nam new file mode 100644 index 0000000000..1c341a0e2b --- /dev/null +++ b/examples/data/mf6/netcdf/test_gwf_sto01_mesh/gwf_sto01/mfsim.nam @@ -0,0 +1,19 @@ +# File generated by Flopy version 3.10.0.dev1 on 01/06/2025 at 14:17:11. +BEGIN options +END options + +BEGIN timing + TDIS6 gwf_sto01.tdis +END timing + +BEGIN models + gwf6 gwf_sto01.nam gwf_sto01 +END models + +BEGIN exchanges +END exchanges + +BEGIN solutiongroup 1 + ims6 gwf_sto01.ims gwf_sto01 +END solutiongroup 1 + diff --git a/examples/data/mf6/netcdf/test_gwf_sto01_structured/gwf_sto01/gwf_sto01.chd b/examples/data/mf6/netcdf/test_gwf_sto01_structured/gwf_sto01/gwf_sto01.chd new file mode 100644 index 0000000000..ee18acbc12 --- /dev/null +++ b/examples/data/mf6/netcdf/test_gwf_sto01_structured/gwf_sto01/gwf_sto01.chd @@ -0,0 +1,15 @@ +# File generated by Flopy version 3.10.0.dev1 on 01/06/2025 at 14:17:14. 
+BEGIN options +END options + +BEGIN dimensions + MAXBOUND 4 +END dimensions + +BEGIN period 1 + 1 10 4 0.00000000E+00 + 1 10 5 0.00000000E+00 + 1 10 6 0.00000000E+00 + 1 10 7 0.00000000E+00 +END period 1 + diff --git a/examples/data/mf6/netcdf/test_gwf_sto01_structured/gwf_sto01/gwf_sto01.dis b/examples/data/mf6/netcdf/test_gwf_sto01_structured/gwf_sto01/gwf_sto01.dis new file mode 100644 index 0000000000..89f0cd9758 --- /dev/null +++ b/examples/data/mf6/netcdf/test_gwf_sto01_structured/gwf_sto01/gwf_sto01.dis @@ -0,0 +1,17 @@ +# NOT generated by Flopy +BEGIN options +END options + +BEGIN dimensions + NLAY 3 + NROW 10 + NCOL 10 +END dimensions + +BEGIN griddata + delr NETCDF + delc NETCDF + top NETCDF + botm NETCDF +END griddata + diff --git a/examples/data/mf6/netcdf/test_gwf_sto01_structured/gwf_sto01/gwf_sto01.ic b/examples/data/mf6/netcdf/test_gwf_sto01_structured/gwf_sto01/gwf_sto01.ic new file mode 100644 index 0000000000..b5a590efc1 --- /dev/null +++ b/examples/data/mf6/netcdf/test_gwf_sto01_structured/gwf_sto01/gwf_sto01.ic @@ -0,0 +1,7 @@ +# NOT generated by Flopy +BEGIN options +END options + +BEGIN griddata + strt NETCDF +END griddata diff --git a/examples/data/mf6/netcdf/test_gwf_sto01_structured/gwf_sto01/gwf_sto01.ims b/examples/data/mf6/netcdf/test_gwf_sto01_structured/gwf_sto01/gwf_sto01.ims new file mode 100644 index 0000000000..eb17394683 --- /dev/null +++ b/examples/data/mf6/netcdf/test_gwf_sto01_structured/gwf_sto01/gwf_sto01.ims @@ -0,0 +1,21 @@ +# File generated by Flopy version 3.10.0.dev1 on 01/06/2025 at 14:17:14. +BEGIN options + PRINT_OPTION summary +END options + +BEGIN nonlinear + OUTER_DVCLOSE 1.00000000E-09 + OUTER_MAXIMUM 500 + UNDER_RELAXATION none +END nonlinear + +BEGIN linear + INNER_MAXIMUM 300 + INNER_DVCLOSE 1.00000000E-09 + inner_rclose 1.00000000E-06 + LINEAR_ACCELERATION bicgstab + RELAXATION_FACTOR 1.00000000 + SCALING_METHOD none + REORDERING_METHOD none +END linear + diff --git a/examples/data/mf6/netcdf/test_gwf_sto01_structured/gwf_sto01/gwf_sto01.in.nc b/examples/data/mf6/netcdf/test_gwf_sto01_structured/gwf_sto01/gwf_sto01.in.nc new file mode 100644 index 0000000000..0611673bef Binary files /dev/null and b/examples/data/mf6/netcdf/test_gwf_sto01_structured/gwf_sto01/gwf_sto01.in.nc differ diff --git a/examples/data/mf6/netcdf/test_gwf_sto01_structured/gwf_sto01/gwf_sto01.nam b/examples/data/mf6/netcdf/test_gwf_sto01_structured/gwf_sto01/gwf_sto01.nam new file mode 100644 index 0000000000..dce4fcaa57 --- /dev/null +++ b/examples/data/mf6/netcdf/test_gwf_sto01_structured/gwf_sto01/gwf_sto01.nam @@ -0,0 +1,17 @@ +# NOT generated by Flopy +BEGIN options + SAVE_FLOWS + NEWTON + NETCDF FILEIN gwf_sto01.in.nc +END options + +BEGIN packages + DIS6 gwf_sto01.dis dis + IC6 gwf_sto01.ic ic + NPF6 gwf_sto01.npf npf + STO6 gwf_sto01.sto sto + RCH6 gwf_sto01.rcha rcha_0 + WEL6 gwf_sto01.wel wel_0 + CHD6 gwf_sto01.chd chd_0 + OC6 gwf_sto01.oc oc +END packages diff --git a/examples/data/mf6/netcdf/test_gwf_sto01_structured/gwf_sto01/gwf_sto01.npf b/examples/data/mf6/netcdf/test_gwf_sto01_structured/gwf_sto01/gwf_sto01.npf new file mode 100644 index 0000000000..26864aa6da --- /dev/null +++ b/examples/data/mf6/netcdf/test_gwf_sto01_structured/gwf_sto01/gwf_sto01.npf @@ -0,0 +1,9 @@ +# NOT generated by Flopy +BEGIN options +END options + +BEGIN griddata + icelltype NETCDF + k NETCDF + k33 NETCDF +END griddata diff --git a/examples/data/mf6/netcdf/test_gwf_sto01_structured/gwf_sto01/gwf_sto01.oc 
b/examples/data/mf6/netcdf/test_gwf_sto01_structured/gwf_sto01/gwf_sto01.oc new file mode 100644 index 0000000000..233b5286d4 --- /dev/null +++ b/examples/data/mf6/netcdf/test_gwf_sto01_structured/gwf_sto01/gwf_sto01.oc @@ -0,0 +1,14 @@ +# File generated by Flopy version 3.10.0.dev1 on 01/06/2025 at 14:17:14. +BEGIN options + BUDGET FILEOUT gwf_sto01.cbc + HEAD FILEOUT gwf_sto01.hds + HEAD PRINT_FORMAT COLUMNS 10 WIDTH 15 DIGITS 6 GENERAL +END options + +BEGIN period 1 + SAVE HEAD ALL + SAVE BUDGET ALL + PRINT HEAD LAST + PRINT BUDGET ALL +END period 1 + diff --git a/examples/data/mf6/netcdf/test_gwf_sto01_structured/gwf_sto01/gwf_sto01.rcha b/examples/data/mf6/netcdf/test_gwf_sto01_structured/gwf_sto01/gwf_sto01.rcha new file mode 100644 index 0000000000..872af44d8f --- /dev/null +++ b/examples/data/mf6/netcdf/test_gwf_sto01_structured/gwf_sto01/gwf_sto01.rcha @@ -0,0 +1,20 @@ +# File generated by Flopy version 3.10.0.dev1 on 01/06/2025 at 14:17:14. +BEGIN options + READASARRAYS +END options + +BEGIN period 1 + recharge + INTERNAL FACTOR 1.0 + 0.00150000 0.00150000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00150000 0.00150000 + 0.00150000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00150000 + 0.00150000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00150000 + 0.00150000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 + 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 + 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 + 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 + 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 + 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 + 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 0.00000000 +END period 1 + diff --git a/examples/data/mf6/netcdf/test_gwf_sto01_structured/gwf_sto01/gwf_sto01.sto b/examples/data/mf6/netcdf/test_gwf_sto01_structured/gwf_sto01/gwf_sto01.sto new file mode 100644 index 0000000000..008831583d --- /dev/null +++ b/examples/data/mf6/netcdf/test_gwf_sto01_structured/gwf_sto01/gwf_sto01.sto @@ -0,0 +1,17 @@ +# NOT generated by Flopy +BEGIN options +END options + +BEGIN griddata + iconvert NETCDF + ss NETCDF + sy NETCDF +END griddata + +BEGIN period 1 + STEADY-STATE +END period 1 + +BEGIN period 2 + TRANSIENT +END period 2 diff --git a/examples/data/mf6/netcdf/test_gwf_sto01_structured/gwf_sto01/gwf_sto01.tdis b/examples/data/mf6/netcdf/test_gwf_sto01_structured/gwf_sto01/gwf_sto01.tdis new file mode 100644 index 0000000000..90d969ac27 --- /dev/null +++ b/examples/data/mf6/netcdf/test_gwf_sto01_structured/gwf_sto01/gwf_sto01.tdis @@ -0,0 +1,44 @@ +# File generated by Flopy version 3.10.0.dev1 on 01/06/2025 at 14:17:14. 
+BEGIN options + TIME_UNITS days + START_DATE_TIME 2041-01-01t00:00:00-05:00 +END options + +BEGIN dimensions + NPER 31 +END dimensions + +BEGIN perioddata + 1.00000000 1 1.00000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 + 365.25000000 6 1.30000000 +END perioddata + diff --git a/examples/data/mf6/netcdf/test_gwf_sto01_structured/gwf_sto01/gwf_sto01.wel b/examples/data/mf6/netcdf/test_gwf_sto01_structured/gwf_sto01/gwf_sto01.wel new file mode 100644 index 0000000000..86dc3dc414 --- /dev/null +++ b/examples/data/mf6/netcdf/test_gwf_sto01_structured/gwf_sto01/gwf_sto01.wel @@ -0,0 +1,17 @@ +# File generated by Flopy version 3.10.0.dev1 on 01/06/2025 at 14:17:14. +BEGIN options + PRINT_INPUT + PRINT_FLOWS +END options + +BEGIN dimensions + MAXBOUND 4 +END dimensions + +BEGIN period 2 + 3 3 6 -1.40000000E+04 + 3 3 7 -8.00000000E+03 + 3 4 6 -5.00000000E+03 + 3 4 7 -3.00000000E+03 +END period 2 + diff --git a/examples/data/mf6/netcdf/test_gwf_sto01_structured/gwf_sto01/mfsim.nam b/examples/data/mf6/netcdf/test_gwf_sto01_structured/gwf_sto01/mfsim.nam new file mode 100644 index 0000000000..6bcebf8d3e --- /dev/null +++ b/examples/data/mf6/netcdf/test_gwf_sto01_structured/gwf_sto01/mfsim.nam @@ -0,0 +1,19 @@ +# File generated by Flopy version 3.10.0.dev1 on 01/06/2025 at 14:17:14. +BEGIN options +END options + +BEGIN timing + TDIS6 gwf_sto01.tdis +END timing + +BEGIN models + gwf6 gwf_sto01.nam gwf_sto01 +END models + +BEGIN exchanges +END exchanges + +BEGIN solutiongroup 1 + ims6 gwf_sto01.ims gwf_sto01 +END solutiongroup 1 + diff --git a/examples/data/mf6/netcdf/test_utlncf_create/disv01b/compare/disv01b.in.nc b/examples/data/mf6/netcdf/test_utlncf_create/disv01b/compare/disv01b.in.nc new file mode 100644 index 0000000000..ca87d5aed3 Binary files /dev/null and b/examples/data/mf6/netcdf/test_utlncf_create/disv01b/compare/disv01b.in.nc differ diff --git a/examples/data/mf6/netcdf/test_utlncf_load/disv01b/disv01b.chd b/examples/data/mf6/netcdf/test_utlncf_load/disv01b/disv01b.chd new file mode 100644 index 0000000000..bbf900dd25 --- /dev/null +++ b/examples/data/mf6/netcdf/test_utlncf_load/disv01b/disv01b.chd @@ -0,0 +1,13 @@ +# File generated by Flopy version 3.10.0.dev1 on 02/05/2025 at 13:05:06. 
+BEGIN options +END options + +BEGIN dimensions + MAXBOUND 2 +END dimensions + +BEGIN period 1 + 1 1 1.00000000E+00 + 1 9 0.00000000E+00 +END period 1 + diff --git a/examples/data/mf6/netcdf/test_utlncf_load/disv01b/disv01b.disv b/examples/data/mf6/netcdf/test_utlncf_load/disv01b/disv01b.disv new file mode 100644 index 0000000000..5cd9b5d632 --- /dev/null +++ b/examples/data/mf6/netcdf/test_utlncf_load/disv01b/disv01b.disv @@ -0,0 +1,49 @@ +# NOT generated by Flopy +BEGIN options + EXPORT_ARRAY_NETCDF + NCF6 FILEIN disv01b.disv.ncf +END options + +BEGIN dimensions + NLAY 3 + NCPL 9 + NVERT 16 +END dimensions + +BEGIN griddata + top NETCDF + botm NETCDF + idomain NETCDF +END griddata + +BEGIN vertices + 1 1.00000000E+08 1.00000030E+08 + 2 1.00000010E+08 1.00000030E+08 + 3 1.00000020E+08 1.00000030E+08 + 4 1.00000030E+08 1.00000030E+08 + 5 1.00000000E+08 1.00000020E+08 + 6 1.00000010E+08 1.00000020E+08 + 7 1.00000020E+08 1.00000020E+08 + 8 1.00000030E+08 1.00000020E+08 + 9 1.00000000E+08 1.00000010E+08 + 10 1.00000010E+08 1.00000010E+08 + 11 1.00000020E+08 1.00000010E+08 + 12 1.00000030E+08 1.00000010E+08 + 13 1.00000000E+08 1.00000000E+08 + 14 1.00000010E+08 1.00000000E+08 + 15 1.00000020E+08 1.00000000E+08 + 16 1.00000030E+08 1.00000000E+08 +END vertices + +BEGIN cell2d + 1 1.00000005E+08 1.00000025E+08 4 1 2 6 5 + 2 1.00000015E+08 1.00000025E+08 4 2 3 7 6 + 3 1.00000025E+08 1.00000025E+08 4 3 4 8 7 + 4 1.00000005E+08 1.00000015E+08 4 5 6 10 9 + 5 1.00000015E+08 1.00000015E+08 4 6 7 11 10 + 6 1.00000025E+08 1.00000015E+08 4 7 8 12 11 + 7 1.00000005E+08 1.00000005E+08 4 9 10 14 13 + 8 1.00000015E+08 1.00000005E+08 4 10 11 15 14 + 9 1.00000025E+08 1.00000005E+08 4 11 12 16 15 +END cell2d + diff --git a/examples/data/mf6/netcdf/test_utlncf_load/disv01b/disv01b.disv.ncf b/examples/data/mf6/netcdf/test_utlncf_load/disv01b/disv01b.disv.ncf new file mode 100644 index 0000000000..49c0c6c8b0 --- /dev/null +++ b/examples/data/mf6/netcdf/test_utlncf_load/disv01b/disv01b.disv.ncf @@ -0,0 +1,9 @@ +# File generated by Flopy version 3.10.0.dev1 on 02/05/2025 at 13:05:06. 
+BEGIN options + wkt 'PROJCS["NAD83 / UTM zone 18N", GEOGCS["NAD83", DATUM["North_American_Datum_1983", SPHEROID["GRS 1980",6378137,298.257222101], TOWGS84[0,0,0,0,0,0,0]], PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]], UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]], AUTHORITY["EPSG","4269"]], PROJECTION["Transverse_Mercator"], PARAMETER["latitude_of_origin",0], PARAMETER["central_meridian",-75], PARAMETER["scale_factor",0.9996], PARAMETER["false_easting",500000], PARAMETER["false_northing",0], UNIT["metre",1,AUTHORITY["EPSG","9001"]], AXIS["Easting",EAST], AXIS["Northing",NORTH], AUTHORITY["EPSG","26918"]]' + DEFLATE 9 + SHUFFLE + CHUNK_TIME 1 + CHUNK_FACE 3 +END options + diff --git a/examples/data/mf6/netcdf/test_utlncf_load/disv01b/disv01b.ic b/examples/data/mf6/netcdf/test_utlncf_load/disv01b/disv01b.ic new file mode 100644 index 0000000000..5ef059bd07 --- /dev/null +++ b/examples/data/mf6/netcdf/test_utlncf_load/disv01b/disv01b.ic @@ -0,0 +1,8 @@ +# NOT generated by Flopy +BEGIN options + EXPORT_ARRAY_NETCDF +END options + +BEGIN griddata + strt NETCDF +END griddata diff --git a/examples/data/mf6/netcdf/test_utlncf_load/disv01b/disv01b.ims b/examples/data/mf6/netcdf/test_utlncf_load/disv01b/disv01b.ims new file mode 100644 index 0000000000..4b8778f351 --- /dev/null +++ b/examples/data/mf6/netcdf/test_utlncf_load/disv01b/disv01b.ims @@ -0,0 +1,5 @@ +# File generated by Flopy version 3.10.0.dev1 on 02/05/2025 at 13:05:06. +BEGIN options + PRINT_OPTION summary +END options + diff --git a/examples/data/mf6/netcdf/test_utlncf_load/disv01b/disv01b.in.nc b/examples/data/mf6/netcdf/test_utlncf_load/disv01b/disv01b.in.nc new file mode 100644 index 0000000000..ca87d5aed3 Binary files /dev/null and b/examples/data/mf6/netcdf/test_utlncf_load/disv01b/disv01b.in.nc differ diff --git a/examples/data/mf6/netcdf/test_utlncf_load/disv01b/disv01b.nam b/examples/data/mf6/netcdf/test_utlncf_load/disv01b/disv01b.nam new file mode 100644 index 0000000000..456a735e7a --- /dev/null +++ b/examples/data/mf6/netcdf/test_utlncf_load/disv01b/disv01b.nam @@ -0,0 +1,12 @@ +# NOT generated by Flopy +BEGIN options + NETCDF FILEIN disv01b.in.nc +END options + +BEGIN packages + DISV6 disv01b.disv disv + IC6 disv01b.ic ic + NPF6 disv01b.npf npf + CHD6 disv01b.chd chd_0 + OC6 disv01b.oc oc +END packages diff --git a/examples/data/mf6/netcdf/test_utlncf_load/disv01b/disv01b.npf b/examples/data/mf6/netcdf/test_utlncf_load/disv01b/disv01b.npf new file mode 100644 index 0000000000..381aa46400 --- /dev/null +++ b/examples/data/mf6/netcdf/test_utlncf_load/disv01b/disv01b.npf @@ -0,0 +1,9 @@ +# NOT generated by Flopy +BEGIN options + EXPORT_ARRAY_NETCDF +END options + +BEGIN griddata + icelltype NETCDF + k NETCDF +END griddata diff --git a/examples/data/mf6/netcdf/test_utlncf_load/disv01b/disv01b.oc b/examples/data/mf6/netcdf/test_utlncf_load/disv01b/disv01b.oc new file mode 100644 index 0000000000..10a8b2774f --- /dev/null +++ b/examples/data/mf6/netcdf/test_utlncf_load/disv01b/disv01b.oc @@ -0,0 +1,9 @@ +# File generated by Flopy version 3.10.0.dev1 on 02/05/2025 at 13:05:06. 
+BEGIN options + HEAD FILEOUT disv01b.hds +END options + +BEGIN period 1 + SAVE HEAD ALL +END period 1 + diff --git a/examples/data/mf6/netcdf/test_utlncf_load/disv01b/disv01b.tdis b/examples/data/mf6/netcdf/test_utlncf_load/disv01b/disv01b.tdis new file mode 100644 index 0000000000..68234fa8b2 --- /dev/null +++ b/examples/data/mf6/netcdf/test_utlncf_load/disv01b/disv01b.tdis @@ -0,0 +1,13 @@ +# File generated by Flopy version 3.10.0.dev1 on 02/05/2025 at 13:05:06. +BEGIN options + START_DATE_TIME 2041-01-01t00:00:00-05:00 +END options + +BEGIN dimensions + NPER 1 +END dimensions + +BEGIN perioddata + 1.00000000 1 1.00000000 +END perioddata + diff --git a/examples/data/mf6/netcdf/test_utlncf_load/disv01b/mfsim.nam b/examples/data/mf6/netcdf/test_utlncf_load/disv01b/mfsim.nam new file mode 100644 index 0000000000..b1e7500a5b --- /dev/null +++ b/examples/data/mf6/netcdf/test_utlncf_load/disv01b/mfsim.nam @@ -0,0 +1,19 @@ +# File generated by Flopy version 3.10.0.dev1 on 02/05/2025 at 13:05:06. +BEGIN options +END options + +BEGIN timing + TDIS6 disv01b.tdis +END timing + +BEGIN models + gwf6 disv01b.nam disv01b +END models + +BEGIN exchanges +END exchanges + +BEGIN solutiongroup 1 + ims6 disv01b.ims disv01b +END solutiongroup 1 + diff --git a/flopy/mf6/data/mfdataarray.py b/flopy/mf6/data/mfdataarray.py index e00c877b1a..0775865a31 100644 --- a/flopy/mf6/data/mfdataarray.py +++ b/flopy/mf6/data/mfdataarray.py @@ -609,6 +609,12 @@ def store_internal( == DataStorageType.external_file ): layer_list.append(index) + if ( + storage.netcdf + and len(layer_list) == 1 + and layer_list[0] == 0 + ): + layer_list[0] = -1 else: if ( storage.layer_storage[layer].data_storage_type @@ -1063,6 +1069,14 @@ def load( external_file_info=None, ) self._resync() + nc_dataset=None + if self.structure.netcdf: + if ( + hasattr(self._model_or_sim, "_nc_dataset") + and len(first_line.split()) > 1 + and first_line.split()[1].lower() == "netcdf" + ): + nc_dataset = self._model_or_sim._nc_dataset if ( self.structure.layered and self.structure.name.lower() != "aux" @@ -1102,6 +1116,9 @@ def load( external_file_info, storage.layer_storage.get_total_size() - 1 ) return_val = [False, None] + elif nc_dataset is not None: + self._set_storage_netcdf(nc_dataset) + return_val = [False, None] else: file_access = MFFileAccessArray( self.structure, @@ -1160,7 +1177,7 @@ def _get_file_entry( if ( data_storage is None or data_storage.layer_storage.get_total_size() == 0 - or not data_storage.has_data() + or (not data_storage.has_data() and not data_storage.netcdf) ): return "" @@ -1206,7 +1223,7 @@ def _get_file_entry( f"{indent}{self.structure.name}{indent}{data}\n" ) elif data_storage.layered: - if not layered_aux: + if not layered_aux and not data_storage.netcdf: if not self.structure.data_item_structures[0].just_data: name = self.structure.name file_entry_array.append(f"{indent}{name}{indent}LAYERED\n") @@ -1241,7 +1258,11 @@ def _get_file_entry( layer_min = layer layer_max = shape_ml.inc_shape_idx(layer) - if layered_aux: + + if data_storage.netcdf: + file_entry_array.append(f"{indent}{self.structure.name}{indent}NETCDF\n") + + elif layered_aux: aux_var_names = ( self.data_dimensions.package_dim.get_aux_variables()[0] ) @@ -1274,15 +1295,18 @@ def _get_file_entry( file_entry_array.append( f"{indent}{self._get_aux_var_name([0])}\n" ) + elif data_storage.netcdf: + file_entry_array.append(f"{indent}{self.structure.name}{indent}NETCDF\n") else: file_entry_array.append(f"{indent}{self.structure.name}\n") data_storage_type = 
data_storage.layer_storage[0].data_storage_type - file_entry_array.append( - self._get_file_entry_layer( - None, data_indent, data_storage_type, ext_file_action + if not data_storage.netcdf: + file_entry_array.append( + self._get_file_entry_layer( + None, data_indent, data_storage_type, ext_file_action + ) ) - ) return "".join(file_entry_array) @@ -1395,6 +1419,9 @@ def _get_file_entry_layer( const_val, layer, self._data_type ).upper() file_entry = f"{file_entry}{indent_string}{const_str}" + elif self._get_storage_obj().netcdf: + indent = self._simulation_data.indent_string + file_entry = f"{indent}{self.structure.name}{indent_string}NETCDF\n" else: # external data ext_str = self._get_external_formatting_string( @@ -1590,6 +1617,23 @@ def plot( return axes + def _set_storage_netcdf(self, nc_dataset, create=False): + if create: + # add array to netcdf dataset + # TODO: update to use pname for (stress) multi-packages + nc_dataset.create_array( + self.structure.get_package(), + self.structure.name.lower(), + self._get_data(), + self.structure.shape, + self.structure.longname, + ) + + storage = self._get_storage_obj() + storage._set_storage_netcdf( + nc_dataset, self.structure.layered, storage.layer_storage.get_total_size() - 1 + ) + class MFTransientArray(MFArray, MFTransient): """ diff --git a/flopy/mf6/data/mfdataplist.py b/flopy/mf6/data/mfdataplist.py index 331177ef08..a77cacb4ba 100644 --- a/flopy/mf6/data/mfdataplist.py +++ b/flopy/mf6/data/mfdataplist.py @@ -221,10 +221,13 @@ def __init__( self._data_types = None self._data_item_names = None self._mg = None + self._grid_type = None self._current_key = 0 self._max_file_size = 1000000000000000 if self._model_or_sim.type == "Model": - self._mg = self._model_or_sim.modelgrid + self._grid_type = self._model_or_sim.get_grid_type() + if hasattr(self._model_or_sim, "modelgrid"): + self._mg = self._model_or_sim.modelgrid if data is not None: try: @@ -302,7 +305,7 @@ def _add_cellid_fields(self, data, keep_existing=False): if data_item.type == DatumType.integer: if data_item.name.lower() == "cellid": columns = data.columns.tolist() - if isinstance(self._mg, StructuredGrid): + if self._grid_type == DiscretizationType.DIS: if ( "cellid_layer" in columns and "cellid_row" in columns @@ -414,7 +417,7 @@ def _build_data_header(self): if data_item.name.lower() == "cellid": # get the appropriate cellid column headings for the # model's discretization type - if isinstance(self._mg, StructuredGrid): + if self._grid_type == DiscretizationType.DIS: self._append_type_list( "cellid_layer", i_type, True ) @@ -431,7 +434,7 @@ def _build_data_header(self): self._append_type_list("cellid_node", i_type, True) else: raise MFDataException( - "ERROR: Unrecognized model grid " + f"ERROR: Unrecognized model grid " "{str(self._mg)} not supported by MFBasicList" ) else: @@ -505,7 +508,7 @@ def _untuple_cellids(self, pdata): # fix columns for field_idx, column_name in fields_to_correct: # add individual layer/row/column/cell/node columns - if isinstance(self._mg, StructuredGrid): + if self._grid_type == DiscretizationType.DIS: try: pdata.insert( loc=field_idx, @@ -1468,7 +1471,7 @@ def _update_id_fields(self, id_fields, data_item_struct, data_frame): if data_item_struct.numeric_index or data_item_struct.is_cellid: name = data_item_struct.name.lower() if name.startswith("cellid"): - if isinstance(self._mg, StructuredGrid): + if self._grid_type == DiscretizationType.DIS: id_fields.append(f"{name}_layer") id_fields.append(f"{name}_row") id_fields.append(f"{name}_column") 
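The storage and list changes above are what let MODFLOW 6 griddata arrays round-trip through a model NetCDF file: on write, FloPy emits the NETCDF keyword in each griddata block and stores the values in the companion *.in.nc dataset, and on load the keyword is resolved back through the model's _nc_dataset. A minimal sketch of the user-facing workflow (the workspace paths and model layout are illustrative, not shipped fixtures):

import flopy

# Load an existing MODFLOW 6 simulation (hypothetical workspace).
sim = flopy.mf6.MFSimulation.load(sim_ws="gwf_sto01_base")

# Rewrite it with griddata arrays stored in a model NetCDF file;
# "mesh2d" and "structured" are the supported types (see the
# mfmodel.py and mfsimbase.py hunks later in this diff).
sim.set_sim_path("gwf_sto01_netcdf")
sim.write_simulation(netcdf="mesh2d")

After this, package files reference the dataset (e.g. an IC griddata block reads "strt NETCDF") and the model name file points at it with "NETCDF FILEIN <model>.in.nc", matching the example inputs added under examples/data/mf6/netcdf.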
diff --git a/flopy/mf6/data/mfdatastorage.py b/flopy/mf6/data/mfdatastorage.py index afcf2596ae..af59b01ec3 100644 --- a/flopy/mf6/data/mfdatastorage.py +++ b/flopy/mf6/data/mfdatastorage.py @@ -71,6 +71,8 @@ class LayerStorage: print code binary : bool whether the data is stored in a binary file + nc_dataset : ModelNetCDFDataset or None + model netcdf dataset Methods ------- @@ -115,6 +117,7 @@ def __init__( self.factor = 1.0 self.iprn = None self.binary = False + self.nc_dataset = None def set_internal_constant(self): self.data_storage_type = DataStorageType.internal_constant @@ -203,6 +206,8 @@ class DataStorage: what internal type is the data stored in (ndarray, recarray, scalar) layered : bool is the data layered + netcdf : bool + is the data stored in netcdf pre_data_comments : string any comments before the start of the data comments : dict @@ -327,6 +332,7 @@ def __init__( self.build_type_list(resolve_data_shape=False) self.layered = layered + self.netcdf = False # initialize comments self.pre_data_comments = None @@ -1718,7 +1724,7 @@ def store_external( fp = self._simulation_data.mfpath.resolve_path( fp_relative, model_name ) - else: + elif fp_relative is not None: fp = os.path.join( self._simulation_data.mfpath.get_sim_path(), fp_relative ) @@ -1832,12 +1838,22 @@ def store_external( self._data_path, self._stress_period, ) - file_access.write_text_file( - data, - fp, - data_type, - data_size, - ) + + if self.netcdf: + file_access.set_netcdf_array( + self.layer_storage[layer].nc_dataset, + self.data_dimensions.structure.get_package(), + self.data_dimensions.structure.name, + data, + layer, + ) + else: + file_access.write_text_file( + data, + fp, + data_type, + data_size, + ) if not preserve_record: self.layer_storage[layer_new].factor = multiplier self.layer_storage[layer_new].internal_data = None @@ -1871,6 +1887,14 @@ def point_to_existing_external_file(self, arr_line, layer): self.set_ext_file_attributes(layer, data_file, print_format, binary) self.layer_storage[layer].factor = multiplier + def _set_storage_netcdf(self, nc_dataset, layered, layer): + self.netcdf = True + for idx in self.layer_storage.indexes(): + self.layer_storage[idx].nc_dataset = nc_dataset + self.layer_storage[ + idx + ].data_storage_type = DataStorageType.external_file + def external_to_external( self, new_external_file, multiplier=None, layer=None, binary=None ): @@ -1986,6 +2010,16 @@ def external_to_internal( self._data_type, self._model_or_sim.modeldiscrit, )[0] + elif self.layer_storage[layer].nc_dataset is not None: + data_out = ( + file_access.read_netcdf_array( + self.layer_storage[layer].nc_dataset, + self._model_or_sim.modeldiscrit, + self.data_dimensions.structure.get_package(), + self.data_dimensions.structure.name, + layer, + ) + ) else: data_out = file_access.read_text_data_from_file( self.get_data_size(layer), @@ -2442,6 +2476,18 @@ def _build_full_data(self, apply_multiplier=False): )[0] * mult ) + elif self.layer_storage[layer].nc_dataset is not None: + data_out = ( + file_access.read_netcdf_array( + self.layer_storage[layer].nc_dataset, + self._model_or_sim.modeldiscrit, + self.data_dimensions.structure.get_package(), + self.data_dimensions.structure.name, + -1, + + ) + * mult + ) else: data_out = ( file_access.read_text_data_from_file( @@ -3015,7 +3061,10 @@ def _layer_prep(self, layer): if layer is None: # layer is none means the data provided is for all layers or this # is not layered data - layer = (0,) + if self.netcdf: + layer = (-1,) + else: + layer = (0,) 
self.layer_storage.list_shape = (1,) self.layer_storage.multi_dim_list = [ self.layer_storage.first_item() @@ -3042,7 +3091,7 @@ def _store_prep(self, layer, multiplier): self._simulation_data.debug, ) layer = self._layer_prep(layer) - if multiplier is None: + if multiplier is None or self.netcdf: multiplier = self.get_default_mult() else: if isinstance(multiplier, float): diff --git a/flopy/mf6/data/mffileaccess.py b/flopy/mf6/data/mffileaccess.py index c4429a82a4..98becb6a42 100644 --- a/flopy/mf6/data/mffileaccess.py +++ b/flopy/mf6/data/mffileaccess.py @@ -513,6 +513,57 @@ def read_binary_data_from_file( fd.close() return bin_data + def read_netcdf_array( + self, + nc_dataset, + modelgrid, + package, + param, + layer, + ): + if isinstance(layer, tuple): + layer = layer[0] + + # read all data if not layered + if layer == 0 and not self.structure.layered: + layer = -1 + + # retrieve data + data = nc_dataset.array(package, param, layer) + + if ( + nc_dataset.nc_type == "mesh2d" + and modelgrid.grid_type == "structured" + ): + # reshape 1d and 2d data associated with mesh + if ( + len(self.structure.shape) == 3 + #or self.structure.shape[0] == "nodes" + ): + if layer > -1: + data = data.reshape(modelgrid.nrow, modelgrid.ncol) + else: + data = data.reshape(modelgrid.nlay, modelgrid.nrow, modelgrid.ncol) + elif len(self.structure.shape) == 2: + data = data.reshape(modelgrid.nrow, modelgrid.ncol) + else: + data = nc_dataset.array(package, param, layer) + + return data + + def set_netcdf_array( + self, + nc_dataset, + package, + param, + data, + layer, + ): + if isinstance(layer, tuple): + layer = layer[0] + + nc_dataset.set_array(package, param, data, layer) + def get_data_string(self, data, data_type, data_indent=""): layer_data_string = [str(data_indent)] line_data_count = 0 diff --git a/flopy/mf6/data/mfstructure.py b/flopy/mf6/data/mfstructure.py index 9ca9afc557..356e667e66 100644 --- a/flopy/mf6/data/mfstructure.py +++ b/flopy/mf6/data/mfstructure.py @@ -594,6 +594,7 @@ def __init__(self): self.parameter_name = None self.one_per_pkg = False self.jagged_array = None + self.netcdf = False def set_value(self, line, common): arr_line = line.strip().split() @@ -771,6 +772,8 @@ def set_value(self, line, common): self.one_per_pkg = bool(arr_line[1]) elif arr_line[0] == "jagged_array": self.jagged_array = arr_line[1] + elif arr_line[0] == "netcdf": + self.netcdf = arr_line[1] def get_type_string(self): return f"[{self.type_string}]" @@ -1089,6 +1092,7 @@ def __init__(self, data_item, model_data, package_type, dfn_list): or "nodes" in data_item.shape or len(data_item.layer_dims) > 1 ) + self.netcdf = data_item.netcdf self.num_data_items = len(data_item.data_items) self.record_within_record = False self.file_data = False diff --git a/flopy/mf6/mfmodel.py b/flopy/mf6/mfmodel.py index 54c6f49ba4..f21df8073d 100644 --- a/flopy/mf6/mfmodel.py +++ b/flopy/mf6/mfmodel.py @@ -152,6 +152,8 @@ def __init__( _internal_package=True, ) + self._nc_dataset = None + def __init_subclass__(cls): """Register model type""" super().__init_subclass__() @@ -927,6 +929,33 @@ def load_base( # load name file instance.name_file.load(strict) + if hasattr(instance.name_file, "nc_filerecord"): + nc_filerecord = instance.name_file.nc_filerecord.get_data() + if nc_filerecord: + from ..utils.model_netcdf import open_dataset + + grid_str = { + "dis6": "structured", + "disv6": "vertex", + } + dis_type = None + for t in instance.name_file.packages.get_data(): + if t[0].lower().startswith("dis"): + dis_type = t[0].lower() + break + 
if dis_type and dis_type in grid_str: + nc_fpth = os.path.join(instance.model_ws, nc_filerecord[0][0]) + instance._nc_dataset = open_dataset(nc_fpth, grid_type=grid_str[dis_type]) + else: + message = ( + "Invalid discretization type " + f"provided for model {modelname} " + "NetCDF input" + ) + raise MFDataException( + model=modelname, + message=message, + ) # order packages vnum = mfstructure.MFStructure().get_version_string() @@ -1296,7 +1325,11 @@ def _format_data_entry(data_entry): else: return f",{data_entry}\n" - def write(self, ext_file_action=ExtFileAction.copy_relative_paths): + def write( + self, + ext_file_action=ExtFileAction.copy_relative_paths, + netcdf=None, + ): """ Writes out model's package files. @@ -1306,9 +1339,51 @@ def write(self, ext_file_action=ExtFileAction.copy_relative_paths): Defines what to do with external files when the simulation path has changed. defaults to copy_relative_paths which copies only files with relative paths, leaving files defined by absolute paths fixed. - + netcdf : str + Create model NetCDF file, of type specified, in which to store + package griddata. 'mesh2d' and 'structured' are supported types. """ + # write netcdf file + if (netcdf or self._nc_dataset is not None) and ( + self.model_type == "gwf6" + or self.model_type == "gwt6" + or self.model_type == "gwe6" + ): + kwargs = {} + if self._nc_dataset is None: + from ..utils.model_netcdf import create_dataset + + # set netcdf file name + nc_fname = f"{self.name}.in.nc" + + # update name file to read from netcdf + self.name_file.nc_filerecord = nc_fname + + # create netcdf dataset + self._nc_dataset = create_dataset( + self.model_type, self.name, netcdf, nc_fname, self.modelgrid + ) + + # reset data storage and populate netcdf file + for pp in self.packagelist: + if pp.package_type == "ncf": + kwargs["shuffle"] = pp.shuffle.get_data() + kwargs["deflate"] = pp.deflate.get_data() + kwargs["chunk_time"] = pp.chunk_time.get_data() + kwargs["chunk_face"] = pp.chunk_face.get_data() + kwargs["chunk_x"] = pp.chunk_x.get_data() + kwargs["chunk_y"] = pp.chunk_y.get_data() + kwargs["chunk_z"] = pp.chunk_z.get_data() + kwargs["wkt"] = pp.wkt.get_data() + + pp._set_netcdf_storage( + self._nc_dataset, create=True + ) + + # write the dataset to netcdf + self._nc_dataset.write(self.model_ws, **kwargs) + # write name file if ( self.simulation_data.verbosity_level.value @@ -1850,6 +1925,13 @@ def set_all_data_internal(self, check_data=True): for package in self.packagelist: package.set_all_data_internal(check_data) + if ( + hasattr(self, "_nc_dataset") + and self._nc_dataset is not None + ): + self._nc_dataset.close() + self._nc_dataset = None + def register_package( self, package, diff --git a/flopy/mf6/mfpackage.py b/flopy/mf6/mfpackage.py index 4ee9d61756..8ac7d9e6f2 100644 --- a/flopy/mf6/mfpackage.py +++ b/flopy/mf6/mfpackage.py @@ -1685,6 +1685,33 @@ def is_valid(self): if dataset.enabled and not dataset.is_valid(): return False + def _set_netcdf_storage(self, nc_dataset, create=False): + """Set the dataset storage to netcdf if supported for the dataset. + + Parameters + ---------- + nc_dataset : ModelNetCDFDataset + The dataset in which to store the data. + create : bool + Create the dataset data variable. 
+ + """ + + for key, dataset in self.datasets.items(): + if isinstance(dataset, mfdataarray.MFArray): + if dataset.structure.netcdf and dataset.has_data(): + try: + dataset._set_storage_netcdf(nc_dataset, create) + except MFDataException as mfde: + raise MFDataException( + mfdata_except=mfde, + model=self._container_package.model_name, + package=self._container_package._get_pname(), + message="Error setting netcdf storage: " + ' data of dataset "{}" in block ' + '"{}"'.format(dataset.structure.name, self.structure.name), + ) + class MFPackage(PackageInterface): """ @@ -3422,6 +3449,24 @@ def plot(self, **kwargs): axes = PlotUtilities._plot_package_helper(self, **kwargs) return axes + def _set_netcdf_storage(self, nc_dataset, create=False): + """Set griddata array dataset storage to netcdf. + + Parameters + ---------- + nc_dataset : ModelNetCDFDataset + The dataset in which to store the data. + create : bool + Create the dataset data variable. + + """ + + # update blocks + for key, block in self.blocks.items(): + # TODO: if key == "griddata" or key == "period": + if key == "griddata": + block._set_netcdf_storage(nc_dataset, create) + class MFChildPackages: """ diff --git a/flopy/mf6/mfsimbase.py b/flopy/mf6/mfsimbase.py index 9546219ad0..f8103fd436 100644 --- a/flopy/mf6/mfsimbase.py +++ b/flopy/mf6/mfsimbase.py @@ -1655,7 +1655,10 @@ def set_all_data_internal(self, check_data=True): package.set_all_data_internal(check_data) def write_simulation( - self, ext_file_action=ExtFileAction.copy_relative_paths, silent=False + self, + ext_file_action=ExtFileAction.copy_relative_paths, + silent=False, + netcdf=None, ): """ Write the simulation to files. @@ -1668,7 +1671,9 @@ def write_simulation( by absolute paths fixed. silent : bool Writes out the simulation in silent mode (verbosity_level = 0) - + netcdf : str + Create model NetCDF file, of type specified, in which to store + package griddata. 'mesh2d' and 'structured' are supported types. 
""" sim_data = self.simulation_data if not sim_data.max_columns_user_set: @@ -1735,7 +1740,10 @@ def write_simulation( >= VerbosityLevel.normal.value ): print(f" writing model {model.name}...") - model.write(ext_file_action=ext_file_action) + model.write( + ext_file_action=ext_file_action, + netcdf=netcdf, + ) self.simulation_data.mfpath.set_last_accessed_path() diff --git a/flopy/mf6/utils/codegen/filters.py b/flopy/mf6/utils/codegen/filters.py index 858dad19e2..ae8cd215ae 100644 --- a/flopy/mf6/utils/codegen/filters.py +++ b/flopy/mf6/utils/codegen/filters.py @@ -317,7 +317,7 @@ def _meta(): def __dfn(): def _var(var: dict) -> List[str]: - exclude = ["longname", "description"] + exclude = ["description"] name = var["name"] subpkg = dfn.get("fkeys", dict()).get(name, None) if subpkg: diff --git a/flopy/utils/model_netcdf.py b/flopy/utils/model_netcdf.py new file mode 100644 index 0000000000..e11a3fc879 --- /dev/null +++ b/flopy/utils/model_netcdf.py @@ -0,0 +1,1069 @@ +import re +import time +import warnings +from pathlib import Path +from typing import Optional + +import numpy as np +import xarray as xr +from pyproj import CRS, Proj + +import flopy + +from ..discretization.grid import Grid +from ..discretization.structuredgrid import StructuredGrid +from ..discretization.unstructuredgrid import UnstructuredGrid +from ..discretization.vertexgrid import VertexGrid + +FILLNA_INT32 = np.int32(-2147483647) +FILLNA_DBL = 9.96920996838687e36 +lenunits = {0: "m", 1: "ft", 2: "m", 3: "m"} + + +class ModelNetCDFDataset: + """ + These objects are intended to support loading, creating and + updating model input NetCDF files. + + Newly created datasets define coordinate or mesh variables + corresponding to the type of NetCDF file specified. When + the discretization crs attribute is valid, a projection + variable is also added to the dataset. + + Data will be associated with the grid and projection when + the relevant interfaces, e.g. create_array() and write(), + are used. Additional interfaces are provided to modify + and read existing data. + + Additionally, these files can can be used as MODFLOW 6 + model inputs for variables that define internal attributes + designated for that purpose, specifically "modflow6_input" + and "modflow6_layer". These attributes are managed internally + for MODFLOW 6 models when the supported data interfaces are + used. + """ + + def __init__(self): + self._modelname = None + self._modeltype = None + self._dataset = None + self._modelgrid = None + self._tags = None + self._fname = None + self._nc_type = None + + @property + def gridtype(self): + if self._nc_type == "mesh2d": + return "LAYERED MESH" + elif self._nc_type == "structured": + return "STRUCTURED" + + return "" + + @property + def modelname(self): + return self._modelname + + @property + def dataset(self): + return self._dataset + + @property + def nc_type(self): + return self._nc_type + + @property + def layered(self): + res = False + if self.nc_type == "mesh2d": + res = True + + return res + + def open(self, nc_fpth: str) -> None: + """ + Open an existing dataset. Assumes the dataset has been annotated + with the necessary attributes to read and update, including global + attributes modflow_model and modflow_grid. + + Args: + nc_fpth (str): Path to an existing NetCDF file. 
+ """ + fpth = Path(nc_fpth).resolve() + self._fname = fpth.name + + self._dataset = xr.open_dataset(fpth, engine="netcdf4") + self._set_mapping() + + self._dataset.attrs["source"] = f"flopy v{flopy.__version__}" + history = self._dataset.attrs["history"] + self._dataset.attrs["history"] = f"{history}; updated {time.ctime(time.time())}" + + def create( + self, modeltype: str, modelname: str, nc_type: str, fname: str, modelgrid: Grid + ) -> None: + """ + Create a new dataset. + + Args: + modeltype (str): A model type, e.g. GWF6. + modelname (str): The model name. + nc_type (str): A supported NetCDF file type: mesh2d or structured. + fname (str): The generated NetCDF file name. + modelgrid (Grid): A FloPy derived discretization object. + """ + self._modelname = modelname.lower() + self._modeltype = modeltype.lower() + self._nc_type = nc_type.lower() + self._modelgrid = modelgrid + self._fname = fname + self._tags = {} + + if self._nc_type != "mesh2d" and self._nc_type != "structured": + raise Exception('Supported NetCDF file types are "mesh2d" and "structured"') + if isinstance(modelgrid, VertexGrid) and self._nc_type != "mesh2d": + raise Exception("VertexGrid object must use mesh2d netcdf file type") + + self._dataset = xr.Dataset() + self._set_global_attrs(modeltype, modelname) + self._set_grid(modelgrid) + # print(self._dataset.info()) + + def close(self): + self._dataset.close() + self.__init__() + + def create_array( + self, + package: str, + param: str, + data: np.typing.ArrayLike, + shape: list, + longname: Optional[str], + ): + """ + Create a new array. Override this function in a derived class. + + Args: + package (str): The name of a data grouping in the file, typically + a package. Must be distinct for each grouping. If this dataset + is associated with a modflow 6 model and this is a base package + (dis, disv, npf, ic, etc.), use that name. If this is a stress + package, use the same package name that is defined in the model + name file, e.g. chd_0, chd_1 or the user defined name. + param (str): The parameter name associated with the data. If this + is a modflow 6 model this should be the same name used in a + modflow input file for the data, e.g. strt, k, k33, idomain, + icelltype, etc. + data (ArrayLike): The data. + shape (list): The named dimensions of the grid that the data is + associated with, e.g. nlay, ncol, nrow, ncpl. + longname (str, None): An optional longname for the parameter that + the data is associated with. + """ + raise NotImplementedError("create_array not implemented in base class") + + def set_array( + self, + package: str, + param: str, + data: np.typing.ArrayLike, + layer: Optional[int], + ): + """ + Set data in an existing array. Override this function in a derived class. + Do not use to update variables that establish the vertices and cell + structure of the grid, e.g. delr/delc or vertex/cell parameters, only + model data associated with the grid. + + Args: + package (str): A package name provided as an argument to create_array(). + param (str): A parameter name provided as an argument to create_array(). + data (ArrayLike): The data. + layer (int, None): The layer that the data applies to. If the data + applies to the entire array or grid do not define. + """ + raise NotImplementedError("set_array not implemented in base class") + + def array(self, package: str, param: str, layer=None): + """ + Read data in an existing array. Override this function in a derived class. + + Args: + package (str): A package name provided as an argument to create_array(). 
+ param (str): A parameter name provided as an argument to create_array(). + layer (int, None): The layer that the data applies to. If the data + applies to the entire array or grid do not define. + + Returns: + The data. + """ + raise NotImplementedError("array not implemented in base class") + + def write(self, path: str, **kwargs) -> None: + """ + Write the data set to a NetCDF file. + + Args: + path (str): A directory in which to write the file. + kwargs (dict): A dictionary of supported encodings to + apply to managed grid associated arrays. + """ + nc_fpath = Path(path) / self._fname + self._set_projection() + self._set_modflow_attrs() + encoding = self._set_encoding(**kwargs) + if encoding is not None: + self._dataset.to_netcdf( + nc_fpath, format="NETCDF4", engine="netcdf4", encoding=encoding + ) + else: + self._dataset.to_netcdf(nc_fpath, format="NETCDF4", engine="netcdf4") + + def path(self, package: str, param: str, verify=False): + path = f"{self._modelname.lower()}/{package.lower()}/{param.lower()}" + + if verify and path not in self._tags: + raise Exception(f"Managed variable path not found in dataset: {path}") + + return path + + def _set_grid(self, dis): + """ + Define grid or coordinate variables associated with the NetCDF + file type. Override this function in a derived class. + + Args: + dis (Grid): A derived FloPy discretization object. + """ + raise NotImplementedError("_set_grid not implemented in base class") + + def _set_coords(self, crs): + """ + Define coordinate variables associated with the NetCDF + file type. Override this function in a derived class. + + Args: + dis (Grid): A derived FloPy discretization object. + """ + raise NotImplementedError("_set_coords not implemented in base class") + + def _set_encoding(self, **kwargs): + if self._modelgrid is None: + return None + # encodings: { + # 'szip_coding', + # 'shuffle', + # 'fletcher32', + # 'quantize_mode', + # 'least_significant_digit', + # 'endian', + # 'szip_pixels_per_block', + # '_FillValue', + # 'compression', + # 'blosc_shuffle', + # 'zlib', + # 'significant_digits', + # 'complevel', + # 'dtype', + # 'contiguous', + # 'chunksizes'} + encoding = {} + encodes = {} + + deflate = kwargs.pop("deflate", None) + shuffle = kwargs.pop("shuffle", None) + chunk_time = kwargs.pop("chunk_time", None) + chunk_face = kwargs.pop("chunk_face", None) + chunk_x = kwargs.pop("chunk_x", None) + chunk_y = kwargs.pop("chunk_y", None) + chunk_z = kwargs.pop("chunk_z", None) + + if deflate: + encodes["zlib"] = True + encodes["complevel"] = deflate + if shuffle: + encodes["shuffle"] = True + + for path in self._tags: + for l in self._tags[path]: + if chunk_face and self._nc_type == "mesh2d": + codes = dict(encodes) + dims = self._dataset[self._tags[path][l]].dims + if "nmesh_face" in dims: + codes["chunksizes"] = [chunk_face] + encoding[self._tags[path][l]] = codes + elif self._nc_type == "structured" and chunk_x and chunk_y and chunk_z: + codes = dict(encodes) + dims = self._dataset[self._tags[path][l]].dims + if "x" in dims and "y" in dims: + if "z" in dims: + codes["chunksizes"] = [chunk_z, chunk_y, chunk_x] + else: + codes["chunksizes"] = [chunk_y, chunk_x] + encoding[self._tags[path][l]] = codes + + else: + encoding[self._tags[path][l]] = encodes + + return encoding + + def _set_projection(self): + if not self._modelgrid: + return + + crs = None + wkt = None + projection = False + if self._modelgrid.crs: + try: + crs = CRS.from_user_input(self._modelgrid.crs) + wkt = crs.to_wkt() + projection = True + except Exception as 
e: + warnings.warn( + f"Cannot generate CRS object from user input: {e}", + UserWarning, + ) + + # update coords based on crs + coords = self._set_coords(crs) + + # Don't define projection and grid mapping if using + # geographic coordinates in the structured type + if self._nc_type == "structured" and coords == "lon lat": + projection = False + + if projection: + # add projection variable + self._dataset = self._dataset.assign({"projection": ([], np.int32(1))}) + if self._nc_type == "structured": + self._dataset["projection"].attrs["crs_wkt"] = wkt + self._dataset["x"].attrs["grid_mapping"] = "projection" + self._dataset["y"].attrs["grid_mapping"] = "projection" + elif self._nc_type == "mesh2d": + self._dataset["projection"].attrs["wkt"] = wkt + self._dataset["mesh_node_x"].attrs["grid_mapping"] = "projection" + self._dataset["mesh_node_y"].attrs["grid_mapping"] = "projection" + self._dataset["mesh_face_x"].attrs["grid_mapping"] = "projection" + self._dataset["mesh_face_y"].attrs["grid_mapping"] = "projection" + + # set grid_mapping and coordinates attributes + for p in self._tags: + for l in self._tags[p]: + dims = self._dataset[self._tags[p][l]].dims + if (self._nc_type == "structured" and len(dims) > 1) or ( + self._nc_type == "mesh2d" + and ("nmesh_face" in dims or "nmesh_node" in dims) + ): + if projection: + self._dataset[self._tags[p][l]].attrs["grid_mapping"] = ( + "projection" + ) + if coords is not None: + self._dataset[self._tags[p][l]].attrs["coordinates"] = coords + + def _set_modflow_attrs(self): + if self._modeltype.endswith("6"): + # MODFLOW 6 attributes + for path in self._tags: + for l in self._tags[path]: + vname = self._tags[path][l] + self._dataset[vname].attrs["modflow6_input"] = path.upper() + if l > -1: + self._dataset[vname].attrs["modflow6_layer"] = np.int32(l + 1) + + def _set_mapping(self): + var_d = {} + if ("modflow_grid" and "modflow_model") not in self._dataset.attrs: + raise Exception("Invalid MODFLOW 6 netcdf dataset") + else: + self._modelname = self._dataset.attrs["modflow_model"].split(":")[0].lower() + mtype_str = self._dataset.attrs["modflow_model"].lower() + if "modflow 6" in mtype_str: + mtype = re.findall(r"\((.*?)\)", mtype_str) + if len(mtype) == 1: + self._modeltype = f"{mtype}6" + gridtype = self._dataset.attrs["modflow_grid"].lower() + if gridtype == "layered mesh": + self._nc_type = "mesh2d" + elif gridtype == "structured": + self._nc_type = "structured" + + for varname, da in self._dataset.data_vars.items(): + if "modflow6_input" in da.attrs: + path = da.attrs["modflow6_input"].lower() + + if "modflow6_layer" in da.attrs: + layer = da.attrs["modflow6_layer"] + # convert indexing to 0-based + layer = layer - 1 + else: + layer = -1 + + if path not in var_d: + var_d[path] = {} + var_d[path][layer] = varname + + self._tags = dict(var_d) + + def _set_global_attrs(self, model_type, model_name): + if model_type.lower() == "gwf6": + dep_var = "hydraulic head" + model = "MODFLOW 6 Groundwater Flow (GWF)" + elif model_type.lower() == "gwt6": + dep_var = "concentration" + model = "MODFLOW 6 Groundwater Transport (GWT)" + elif model_type.lower() == "gwe6": + dep_var = "temperature" + model = "MODFLOW 6 Groundwater Energy (GWE)" + else: + dep_var = "model" + if model_type.endswith("6"): + mtype = re.sub(r"\d+$", "", model_type.upper()) + model = f"MODFLOW 6 {mtype}" + else: + model = model_type.upper() + + if self._nc_type == "structured": + grid = self._nc_type.upper() + conventions = "CF-1.11" + elif self._nc_type == "mesh2d": + grid = "LAYERED MESH" 
+ conventions = "CF-1.11 UGRID-1.0" + + self._dataset.attrs["title"] = f"{model_name.upper()} {dep_var} input" + self._dataset.attrs["source"] = f"flopy v{flopy.__version__}" + self._dataset.attrs["modflow_grid"] = grid + self._dataset.attrs["modflow_model"] = f"{model_name.upper()}: {model} model" + self._dataset.attrs["history"] = "first created " + time.ctime(time.time()) + self._dataset.attrs["Conventions"] = conventions + + def _create_array( + self, + package: str, + param: str, + data: np.typing.ArrayLike, + nc_shape: list, + longname: Optional[str], + ): + layer = -1 + if data.dtype == np.float64: + fillna = FILLNA_DBL + elif data.dtype == np.int32: + fillna = FILLNA_INT32 + elif data.dtype == np.int64: + # TODO + fillna = FILLNA_INT32 + + # set name and path + varname = f"{package.lower()}_{param.lower()}" + path = self.path(package, param) + + # create variable + var_d = {varname: (nc_shape, data)} + self._dataset = self._dataset.assign(var_d) + + # update var attrs + self._dataset[varname].attrs["_FillValue"] = fillna + if longname: + self._dataset[varname].attrs["long_name"] = longname + + # update mapping + if path not in self._tags: + self._tags[path] = {} + if layer in self._tags[path]: + raise Exception(f"Array variable path already exists: {path}") + self._tags[path][layer] = varname + + def _create_layered_array( + self, + package: str, + param: str, + data: np.typing.ArrayLike, + nc_shape: list, + longname: Optional[str], + ): + if data.dtype == np.float64: + fillna = FILLNA_DBL + elif data.dtype == np.int32: + fillna = FILLNA_INT32 + elif data.dtype == np.int64: + # TODO + fillna = FILLNA_INT32 + + # set basename and path + basename = f"{package.lower()}_{param.lower()}" + path = self.path(package, param) + if path not in self._tags: + self._tags[path] = {} + + for layer in range(data.shape[0]): + # set varname + mf6_layer = layer + 1 + layer_vname = f"{basename}_l{mf6_layer}" + + # create variable + var_d = {layer_vname: (nc_shape, data[layer].flatten())} + self._dataset = self._dataset.assign(var_d) + + # update var attrs + self._dataset[layer_vname].attrs["_FillValue"] = fillna + if longname: + self._dataset[layer_vname].attrs["long_name"] = ( + f"{longname} layer={mf6_layer}" + ) + + # update mapping + if layer in self._tags[path]: + raise Exception( + f"Array variable path already exists: {path}, layer={layer}" + ) + self._tags[path][layer] = layer_vname + + +class DisNetCDFStructured(ModelNetCDFDataset): + def __init__(self): + super().__init__() + + def create_array( + self, + package: str, + param: str, + data: np.typing.ArrayLike, + shape: list, + longname: Optional[str], + ): + data = np.array(data) + nc_shape = None + if len(data.shape) == 3: + nc_shape = ["z", "y", "x"] + elif len(data.shape) == 2: + nc_shape = ["y", "x"] + elif len(data.shape) == 1: + if shape[0].lower() == "nrow": + nc_shape = ["y"] + elif shape[0].lower() == "ncol": + nc_shape = ["x"] + + self._create_array(package, param, data, nc_shape, longname) + + def set_array( + self, + package: str, + param: str, + data: np.typing.ArrayLike, + layer: Optional[int], + ): + data = np.array(data) + path = self.path(package, param, verify=True) + vname = self._tags[path][-1] + if len(self._dataset[vname].values.shape) == 1: + self._dataset[vname].values = data.flatten() + else: + if layer is not None and layer > -1: + self._dataset[vname].values[layer] = data + else: + self._dataset[vname].values = data + + def array(self, package: str, param: str, layer=None): + path = self.path(package, param, 
verify=True) + if len(self._dataset[self._tags[path][-1]].data.shape) == 3: + if layer > -1: + return self._dataset[self._tags[path][-1]].data[layer] + else: + return self._dataset[self._tags[path][-1]].data + else: + return self._dataset[self._tags[path][-1]].data + + def _set_grid(self, dis): + if dis.angrot != 0.0: + xoff = 0.0 + yoff = 0.0 + else: + xoff = dis.xoffset + yoff = dis.yoffset + + # set coordinate var bounds + x_bnds = [] + xv = xoff + dis.xyedges[0] + for idx, val in enumerate(xv): + if idx + 1 < len(xv): + bnd = [] + bnd.append(xv[idx]) + bnd.append(xv[idx + 1]) + x_bnds.append(bnd) + + y_bnds = [] + yv = yoff + dis.xyedges[1] + for idx, val in enumerate(yv): + if idx + 1 < len(yv): + bnd = [] + bnd.append(yv[idx + 1]) + bnd.append(yv[idx]) + y_bnds.append(bnd) + + # set coordinate vars + x = xoff + dis.xycenters[0] + y = yoff + dis.xycenters[1] + z = [float(x) for x in range(1, dis.nlay + 1)] + + # create coordinate vars + var_d = {"z": (["z"], z), "y": (["y"], y), "x": (["x"], x)} + self._dataset = self._dataset.assign(var_d) + + # create bound vars + var_d = {"x_bnds": (["x", "bnd"], x_bnds), "y_bnds": (["y", "bnd"], y_bnds)} + self._dataset = self._dataset.assign(var_d) + + # set coordinate variable attributes + self._dataset["z"].attrs["units"] = "layer" + self._dataset["z"].attrs["long_name"] = "layer number" + self._dataset["y"].attrs["units"] = lenunits[dis.lenuni] + self._dataset["y"].attrs["axis"] = "Y" + self._dataset["y"].attrs["standard_name"] = "projection_y_coordinate" + self._dataset["y"].attrs["long_name"] = "Northing" + self._dataset["y"].attrs["bounds"] = "y_bnds" + self._dataset["x"].attrs["units"] = lenunits[dis.lenuni] + self._dataset["x"].attrs["axis"] = "X" + self._dataset["x"].attrs["standard_name"] = "projection_x_coordinate" + self._dataset["x"].attrs["long_name"] = "Easting" + self._dataset["x"].attrs["bounds"] = "x_bnds" + + def _set_coords(self, crs): + if crs is None or self._modelgrid is None: + return "x y" + + lats = [] + lons = [] + xdim = self._dataset.sizes["x"] + ydim = self._dataset.sizes["y"] + + try: + epsg_code = crs.to_epsg(min_confidence=90) + proj = Proj( + f"EPSG:{epsg_code}", + ) + except Exception as e: + warnings.warn( + f"Cannot create coordinates from CRS: {e}", + UserWarning, + ) + return "x y" + + x_local = [] + y_local = [] + xycenters = self._modelgrid.xycenters + for y in xycenters[1]: + for x in xycenters[0]: + x_local.append(x) + y_local.append(y) + + x_global, y_global = self._modelgrid.get_coords(x_local, y_local) + + for i, x in enumerate(x_global): + lon, lat = proj(x, y_global[i], inverse=True) + lats.append(lat) + lons.append(lon) + + lats = np.array(lats) + lons = np.array(lons) + + # create coordinate vars + var_d = { + "lat": (["y", "x"], lats.reshape(ydim, xdim)), + "lon": (["y", "x"], lons.reshape(ydim, xdim)), + } + self._dataset = self._dataset.assign(var_d) + + # set coordinate attributes + self._dataset["lat"].attrs["units"] = "degrees_north" + self._dataset["lat"].attrs["standard_name"] = "latitude" + self._dataset["lat"].attrs["long_name"] = "latitude" + self._dataset["lon"].attrs["units"] = "degrees_east" + self._dataset["lon"].attrs["standard_name"] = "longitude" + self._dataset["lon"].attrs["long_name"] = "longitude" + + return "lon lat" + + +class DisNetCDFMesh2d(ModelNetCDFDataset): + def __init__(self): + super().__init__() + + def create_array( + self, + package: str, + param: str, + data: np.typing.ArrayLike, + shape: list, + longname: Optional[str], + ): + data = np.array(data) + nc_shape 
= None + if len(data.shape) == 1: + if shape[0].lower() == "nrow": + nc_shape = ["y"] + elif shape[0].lower() == "ncol": + nc_shape = ["x"] + else: + nc_shape = ["nmesh_face"] + + if len(data.shape) == 3: + self._create_layered_array(package, param, data, nc_shape, longname) + else: + self._create_array(package, param, data.flatten(), nc_shape, longname) + + def set_array( + self, + package: str, + param: str, + data: np.typing.ArrayLike, + layer: Optional[int], + ): + data = np.array(data) + path = self.path(package, param, verify=True) + if layer is not None and layer in self._tags[path]: + vname = self._tags[path][layer] + else: + vname = self._tags[path][-1] + self._dataset[vname].values = data.flatten() + + def array(self, package: str, param: str, layer=None): + path = self.path(package, param, verify=True) + if path in self._tags: + if layer is None or layer == -1: + if layer == -1 and layer in self._tags[path]: + return self._dataset[self._tags[path][layer]].data + else: + data = [] + for l in self._tags[path]: + data.append(self._dataset[self._tags[path][l]].data) + return np.array(data) + elif layer in self._tags[path]: + return self._dataset[self._tags[path][layer]].data + + return None + + def _set_grid(self, dis): + # mesh container variable + self._dataset = self._dataset.assign({"mesh": ([], np.int32(1))}) + self._dataset["mesh"].attrs["cf_role"] = "mesh_topology" + self._dataset["mesh"].attrs["long_name"] = "2D mesh topology" + self._dataset["mesh"].attrs["topology_dimension"] = np.int32(2) + self._dataset["mesh"].attrs["face_dimension"] = "nmesh_face" + self._dataset["mesh"].attrs["node_coordinates"] = "mesh_node_x mesh_node_y" + self._dataset["mesh"].attrs["face_coordinates"] = "mesh_face_x mesh_face_y" + self._dataset["mesh"].attrs["face_node_connectivity"] = "mesh_face_nodes" + + # mesh node x and y + var_d = { + "mesh_node_x": (["nmesh_node"], dis.verts[:, 0]), + "mesh_node_y": (["nmesh_node"], dis.verts[:, 1]), + } + self._dataset = self._dataset.assign(var_d) + self._dataset["mesh_node_x"].attrs["units"] = lenunits[dis.lenuni] + self._dataset["mesh_node_x"].attrs["standard_name"] = "projection_x_coordinate" + self._dataset["mesh_node_x"].attrs["long_name"] = "Easting" + self._dataset["mesh_node_y"].attrs["units"] = lenunits[dis.lenuni] + self._dataset["mesh_node_y"].attrs["standard_name"] = "projection_y_coordinate" + self._dataset["mesh_node_y"].attrs["long_name"] = "Northing" + + # mesh face x and y + x_bnds = [] + x_verts = dis.verts[:, 0].reshape(dis.nrow + 1, dis.ncol + 1) + for i in range(dis.nrow): + if i + 1 > dis.nrow: + break + for j in range(dis.ncol): + if j + 1 <= dis.ncol: + bnd = [] + bnd.append(x_verts[i + 1][j]) + bnd.append(x_verts[i + 1][j + 1]) + bnd.append(x_verts[i][j + 1]) + bnd.append(x_verts[i][j]) + x_bnds.append(bnd) + + y_bnds = [] + y_verts = dis.verts[:, 1].reshape(dis.nrow + 1, dis.ncol + 1) + for i in range(dis.nrow): + if i + 1 > dis.nrow: + break + for j in range(dis.ncol): + if j + 1 <= dis.ncol: + bnd = [] + bnd.append(y_verts[i + 1][j]) + bnd.append(y_verts[i + 1][j + 1]) + bnd.append(y_verts[i][j + 1]) + bnd.append(y_verts[i][j]) + y_bnds.append(bnd) + + var_d = { + "mesh_face_x": (["nmesh_face"], dis.xcellcenters.flatten()), + "mesh_face_xbnds": (["nmesh_face", "max_nmesh_face_nodes"], x_bnds), + "mesh_face_y": (["nmesh_face"], dis.ycellcenters.flatten()), + "mesh_face_ybnds": (["nmesh_face", "max_nmesh_face_nodes"], y_bnds), + } + self._dataset = self._dataset.assign(var_d) + self._dataset["mesh_face_x"].attrs["units"] = 
lenunits[dis.lenuni] + self._dataset["mesh_face_x"].attrs["standard_name"] = "projection_x_coordinate" + self._dataset["mesh_face_x"].attrs["long_name"] = "Easting" + self._dataset["mesh_face_x"].attrs["bounds"] = "mesh_face_xbnds" + self._dataset["mesh_face_y"].attrs["units"] = lenunits[dis.lenuni] + self._dataset["mesh_face_y"].attrs["standard_name"] = "projection_y_coordinate" + self._dataset["mesh_face_y"].attrs["long_name"] = "Northing" + self._dataset["mesh_face_y"].attrs["bounds"] = "mesh_face_ybnds" + + # mesh face nodes + max_face_nodes = 4 + face_nodes = [] + for r in dis.iverts: + nodes = [np.int32(x + 1) for x in r] + nodes.reverse() + face_nodes.append(nodes) + + var_d = { + "mesh_face_nodes": (["nmesh_face", "max_nmesh_face_nodes"], face_nodes), + } + self._dataset = self._dataset.assign(var_d) + self._dataset["mesh_face_nodes"].attrs["cf_role"] = "face_node_connectivity" + self._dataset["mesh_face_nodes"].attrs["long_name"] = ( + "Vertices bounding cell (counterclockwise)" + ) + self._dataset["mesh_face_nodes"].attrs["_FillValue"] = FILLNA_INT32 + self._dataset["mesh_face_nodes"].attrs["start_index"] = np.int32(1) + + def _set_coords(self, crs): + return "mesh_face_x mesh_face_y" + + +class DisvNetCDFMesh2d(ModelNetCDFDataset): + def __init__(self): + super().__init__() + + def create_array( + self, + package: str, + param: str, + data: np.typing.ArrayLike, + shape: list, + longname: Optional[str], + ): + data = np.array(data) + nc_shape = ["nmesh_face"] + + if len(data.shape) == 2: + self._create_layered_array(package, param, data, nc_shape, longname) + else: + self._create_array(package, param, data.flatten(), nc_shape, longname) + + def set_array( + self, + package: str, + param: str, + data: np.typing.ArrayLike, + layer: Optional[int], + ): + data = np.array(data) + path = self.path(package, param, verify=True) + if layer is not None and layer in self._tags[path]: + vname = self._tags[path][layer] + else: + vname = self._tags[path][-1] + self._dataset[vname].values = data.flatten() + + def array(self, package: str, param: str, layer=None): + path = self.path(package, param, verify=True) + if path in self._tags: + if layer is None or layer == -1: + if layer == -1 and layer in self._tags[path]: + return self._dataset[self._tags[path][layer]].data + else: + data = [] + for l in self._tags[path]: + data.append(self._dataset[self._tags[path][l]].data) + return np.array(data) + elif layer in self._tags[path]: + return self._dataset[self._tags[path][layer]].data + + return None + + def _set_grid(self, disv): + # mesh container variable + self._dataset = self._dataset.assign({"mesh": ([], np.int32(1))}) + self._dataset["mesh"].attrs["cf_role"] = "mesh_topology" + self._dataset["mesh"].attrs["long_name"] = "2D mesh topology" + self._dataset["mesh"].attrs["topology_dimension"] = np.int32(2) + self._dataset["mesh"].attrs["face_dimension"] = "nmesh_face" + self._dataset["mesh"].attrs["node_coordinates"] = "mesh_node_x mesh_node_y" + self._dataset["mesh"].attrs["face_coordinates"] = "mesh_face_x mesh_face_y" + self._dataset["mesh"].attrs["face_node_connectivity"] = "mesh_face_nodes" + + # mesh node x and y + var_d = { + "mesh_node_x": (["nmesh_node"], disv.verts[:, 0]), + "mesh_node_y": (["nmesh_node"], disv.verts[:, 1]), + } + self._dataset = self._dataset.assign(var_d) + self._dataset["mesh_node_x"].attrs["units"] = lenunits[disv.lenuni] + self._dataset["mesh_node_x"].attrs["standard_name"] = "projection_x_coordinate" + self._dataset["mesh_node_x"].attrs["long_name"] = "Easting" + 
self._dataset["mesh_node_y"].attrs["units"] = lenunits[disv.lenuni] + self._dataset["mesh_node_y"].attrs["standard_name"] = "projection_y_coordinate" + self._dataset["mesh_node_y"].attrs["long_name"] = "Northing" + + # determine max number of cell vertices + cell_nverts = [cell2d[3] for cell2d in disv.cell2d] + max_face_nodes = max(cell_nverts) + + # mesh face x and y + x_bnds = [] + for x in disv.xvertices: + x = x[::-1] + if len(x) < max_face_nodes: + # TODO: set fill value? + x.extend([FILLNA_INT32] * (max_face_nodes - len(x))) + x_bnds.append(x) + + y_bnds = [] + for y in disv.yvertices: + y = y[::-1] + if len(y) < max_face_nodes: + # TODO: set fill value? + y.extend([FILLNA_INT32] * (max_face_nodes - len(y))) + y_bnds.append(y) + + var_d = { + "mesh_face_x": (["nmesh_face"], disv.xcellcenters), + "mesh_face_xbnds": (["nmesh_face", "max_nmesh_face_nodes"], x_bnds), + "mesh_face_y": (["nmesh_face"], disv.ycellcenters), + "mesh_face_ybnds": (["nmesh_face", "max_nmesh_face_nodes"], y_bnds), + } + self._dataset = self._dataset.assign(var_d) + self._dataset["mesh_face_x"].attrs["units"] = lenunits[disv.lenuni] + self._dataset["mesh_face_x"].attrs["standard_name"] = "projection_x_coordinate" + self._dataset["mesh_face_x"].attrs["long_name"] = "Easting" + self._dataset["mesh_face_x"].attrs["bounds"] = "mesh_face_xbnds" + self._dataset["mesh_face_y"].attrs["units"] = lenunits[disv.lenuni] + self._dataset["mesh_face_y"].attrs["standard_name"] = "projection_y_coordinate" + self._dataset["mesh_face_y"].attrs["long_name"] = "Northing" + self._dataset["mesh_face_y"].attrs["bounds"] = "mesh_face_ybnds" + + # mesh face nodes + face_nodes = [] + for idx, r in enumerate(disv.cell2d): + nodes = disv.cell2d[idx][4 : 4 + r[3]] + nodes = [np.int32(x + 1) for x in nodes] + nodes.reverse() + if len(nodes) < max_face_nodes: + # TODO set fill value? + nodes.extend([FILLNA_INT32] * (max_face_nodes - len(nodes))) + face_nodes.append(nodes) + + var_d = { + "mesh_face_nodes": (["nmesh_face", "max_nmesh_face_nodes"], face_nodes), + } + self._dataset = self._dataset.assign(var_d) + self._dataset["mesh_face_nodes"].attrs["cf_role"] = "face_node_connectivity" + self._dataset["mesh_face_nodes"].attrs["long_name"] = ( + "Vertices bounding cell (counterclockwise)" + ) + self._dataset["mesh_face_nodes"].attrs["_FillValue"] = FILLNA_INT32 + self._dataset["mesh_face_nodes"].attrs["start_index"] = np.int32(1) + + def _set_coords(self, crs): + return "mesh_face_x mesh_face_y" + + +def open_dataset(nc_fpth: str, grid_type: str) -> ModelNetCDFDataset: + """ + Open an existing dataset. + + Args: + nc_fpth (str): The path of the existing NetCDF file. + grid_type (str): The FloPy discretizaton type corresponding + to the model associated with the file: vertex or structured. + + Returns: + ModelNetCDFDataset: A dataset derived from the base class. 
+ """ + nc_dataset = None + grid_t = grid_type.lower() + + # grid_type corresponds to a flopy.discretization type + if grid_t != "vertex" and grid_t != "structured": + raise Exception( + "Supported NetCDF discretization types " + 'are "vertex" (DISV) and "structured" ' + "(DIS)" + ) + + fpth = Path(nc_fpth).resolve() + dataset = xr.open_dataset(fpth, engine="netcdf4") + + if "modflow_grid" not in dataset.attrs or "modflow_model" not in dataset.attrs: + modelname = None + gridtype = None + else: + modelname = dataset.attrs["modflow_model"].split(":")[0].lower() + gridtype = dataset.attrs["modflow_grid"].lower() + if grid_t == "vertex": + if gridtype == "layered mesh": + nc_dataset = DisvNetCDFMesh2d() + elif grid_t == "structured": + if gridtype == "layered mesh": + nc_dataset = DisNetCDFMesh2d() + elif gridtype == "structured": + nc_dataset = DisNetCDFStructured() + + dataset.close() + + if nc_dataset: + nc_dataset.open(fpth) + else: + raise Exception( + f"Unable to load netcdf dataset for file grid " + f'type "{gridtype}" and discretization grid ' + f'type "{grid_t}"' + ) + + return nc_dataset + + +def create_dataset( + modeltype: str, modelname: str, nc_type: str, nc_fname: str, modelgrid: Grid +) -> ModelNetCDFDataset: + """ + Create a new dataset. + + Args: + modeltype (str): A model type, e.g. GWF6. + modelname (str): The model name. + nc_type (str): A supported NetCDF file type: mesh2d or structured. + nc_fname (str): The generated NetCDF file name. + modelgrid (Grid): A FloPy derived discretization object. + + Returns: + ModelNetCDFDataset: A dataset derived from the base class. + """ + nc_dataset = None + if isinstance(modelgrid, VertexGrid): + if nc_type.lower() == "mesh2d": + nc_dataset = DisvNetCDFMesh2d() + elif isinstance(modelgrid, StructuredGrid): + if nc_type.lower() == "mesh2d": + nc_dataset = DisNetCDFMesh2d() + elif nc_type.lower() == "structured": + nc_dataset = DisNetCDFStructured() + + if nc_dataset: + nc_dataset.create(modeltype, modelname, nc_type, nc_fname, modelgrid) + else: + raise Exception( + f"Unable to generate netcdf dataset for file type " + f'"{nc_type.lower()}" and discretization grid type ' + f'"{modelgrid.grid_type}"' + ) + + return nc_dataset diff --git a/pyproject.toml b/pyproject.toml index d49d8fed41..0cff25d6e6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -76,6 +76,7 @@ optional = [ "scipy", "shapely >=2.0", "vtk >=9.4.0", + "xarray", "xmipy", "h5py", ]
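For completeness, a sketch of driving the new flopy.utils.model_netcdf module directly, outside of a simulation write. Everything here is illustrative: the grid dimensions, model name, and array values are made up, and the example assumes the optional xarray/netCDF4 dependencies are installed.

import numpy as np

from flopy.discretization.structuredgrid import StructuredGrid
from flopy.utils.model_netcdf import create_dataset, open_dataset

# Hypothetical 3-layer, 10x10 structured grid (lenuni=2 -> meters).
grid = StructuredGrid(
    delc=np.full(10, 1000.0),
    delr=np.full(10, 1000.0),
    top=np.full((10, 10), 50.0),
    botm=np.zeros((3, 10, 10)),
    nlay=3, nrow=10, ncol=10, lenuni=2,
)

# Create a "structured" dataset, add one griddata array, and write it.
ds = create_dataset("gwf6", "mymodel", "structured", "mymodel.in.nc", grid)
ds.create_array("ic", "strt", np.zeros((3, 10, 10)), ["nlay", "nrow", "ncol"], "starting head")
ds.write(".")  # writes ./mymodel.in.nc

# Reopen the file later; grid_type is the FloPy discretization type.
ds2 = open_dataset("mymodel.in.nc", grid_type="structured")
strt = ds2.array("ic", "strt", layer=-1)  # full (nlay, nrow, ncol) array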