diff --git a/docs/examples/04_modifying_layermodels.ipynb b/docs/examples/04_modifying_layermodels.ipynb index 84d3c338..5a37f1e9 100644 --- a/docs/examples/04_modifying_layermodels.ipynb +++ b/docs/examples/04_modifying_layermodels.ipynb @@ -268,9 +268,6 @@ "metadata": {}, "outputs": [], "source": [ - "layer_names = []\n", - "colors_new = {}\n", - "\n", "for j, i in split_reindexer.items():\n", " if j not in colors:\n", " colors[j] = colors[i]" diff --git a/docs/examples/11_grid_rotation.ipynb b/docs/examples/11_grid_rotation.ipynb index e522cc55..3f4d9132 100644 --- a/docs/examples/11_grid_rotation.ipynb +++ b/docs/examples/11_grid_rotation.ipynb @@ -239,10 +239,7 @@ "oc = nlmod.gwf.oc(ds, gwf)\n", "\n", "# create recharge package\n", - "rch = nlmod.gwf.rch(ds, gwf)\n", - "\n", - "# create storage package\n", - "sto = nlmod.gwf.sto(ds, gwf)" + "rch = nlmod.gwf.rch(ds, gwf)" ] }, { diff --git a/docs/examples/12_layer_generation.ipynb b/docs/examples/12_layer_generation.ipynb index d11e5ae3..a3185d8b 100644 --- a/docs/examples/12_layer_generation.ipynb +++ b/docs/examples/12_layer_generation.ipynb @@ -71,7 +71,7 @@ "outputs": [], "source": [ "f, ax = nlmod.plot.get_map(extent, figsize=5)\n", - "nlmod.plot.data_array(regis[\"top\"].max(\"layer\"), edgecolor=\"k\")" + "nlmod.plot.data_array(regis[\"top\"], edgecolor=\"k\")" ] }, { diff --git a/docs/examples/17_unsaturated_zone_flow.ipynb b/docs/examples/17_unsaturated_zone_flow.ipynb index 68c675ad..6dcd1256 100644 --- a/docs/examples/17_unsaturated_zone_flow.ipynb +++ b/docs/examples/17_unsaturated_zone_flow.ipynb @@ -95,7 +95,7 @@ "y = np.floor(y / 100) * 100\n", "dx = dy = 100\n", "extent = [x, x + dx, y, y + dy]\n", - "regis = nlmod.read.regis.get_regis(extent, cachename=\"regis.nc\", cachedir=cachedir)" + "regis = nlmod.read.regis.get_regis(extent, drop_layer_dim_from_top=False, cachename=\"regis.nc\", cachedir=cachedir)" ] }, { diff --git a/nlmod/dims/base.py b/nlmod/dims/base.py index 5c238db9..56e5c1e0 100644 --- a/nlmod/dims/base.py +++ b/nlmod/dims/base.py @@ -179,7 +179,7 @@ def to_model_ds( return ds -def extrapolate_ds(ds, mask=None): +def extrapolate_ds(ds, mask=None, layer="layer"): """Fill missing data in layermodel based on nearest interpolation. Used for ensuring layer model contains data everywhere. Useful for @@ -190,10 +190,12 @@ def extrapolate_ds(ds, mask=None): ---------- ds : xarray.DataSet Model layer DataSet - mask: np.ndarray, optional + mask : np.ndarray, optional Boolean mask for each cell, with a value of True if its value needs to be determined. When mask is None, it is determined from the botm- variable. The default is None. + layer : str, optional + The name of the layer dimension. The default is 'layer'. 
Returns ------- @@ -201,7 +203,7 @@ def extrapolate_ds(ds, mask=None): filled layermodel """ if mask is None: - mask = np.isnan(ds["botm"]).all("layer").data + mask = np.isnan(ds["botm"]).all(layer).data if not mask.any(): # all of the model cells are is inside the known area return ds @@ -225,9 +227,9 @@ def extrapolate_ds(ds, mask=None): data = ds[key].data if ds[key].dims == dims: if np.isnan(data[mask]).sum() > 0: # do not update if no NaNs - data[mask] = data[~mask, i] - elif ds[key].dims == ("layer",) + dims: - for lay in range(len(ds["layer"])): + data[mask] = data[~mask][i] + elif ds[key].dims == (layer,) + dims: + for lay in range(len(ds[layer])): if np.isnan(data[lay, mask]).sum() > 0: # do not update if no NaNs data[lay, mask] = data[lay, ~mask][i] else: diff --git a/nlmod/dims/layers.py b/nlmod/dims/layers.py index d540674e..4df04b70 100644 --- a/nlmod/dims/layers.py +++ b/nlmod/dims/layers.py @@ -240,6 +240,13 @@ def split_layers_ds( logger.info(f"Splitting layers {list(split_dict)}") + if "layer" in ds["top"].dims: + msg = "Top in ds has a layer dimension. split_layers_ds will remove the layer dimension from top in ds." + logger.warning(msg) + else: + ds = ds.copy() + ds["top"] = ds["botm"] + calculate_thickness(ds) + layers_org = layers.copy() # add extra layers (keep the original ones for now, as we will copy data first) for lay0 in split_dict: @@ -267,6 +274,9 @@ def split_layers_ds( # drop the original layers ds = ds.drop_sel(layer=list(split_dict)) + # remove layer dimension from top again + ds = remove_layer_dim_from_top(ds, inconsistency_threshold=0.001) + if return_reindexer: # determine reindexer reindexer = OrderedDict(zip(layers, layers_org)) @@ -559,6 +569,13 @@ def combine_layers_ds( # calculate new tops/bots logger.info("Calculating new layer tops and bottoms...") + if "layer" in ds["top"].dims: + msg = "Top in ds has a layer dimension. combine_layers_ds will remove the layer dimension from top in ds." + logger.warning(msg) + else: + ds = ds.copy() + ds["top"] = ds["botm"] + calculate_thickness(ds) + da_dict = {} new_top, new_bot, reindexer = layer_combine_top_bot( @@ -604,6 +621,9 @@ def combine_layers_ds( logger.info("Done! Created new dataset with combined layers!") ds_combine = xr.Dataset(da_dict, attrs=attrs) + # remove layer dimension from top again + ds = remove_layer_dim_from_top(ds, inconsistency_threshold=0.001) + return ds_combine @@ -664,7 +684,7 @@ def add_kh_kv_from_ml_layer_to_ds( return ds -def set_model_top(ds, top, min_thickness=0.0): +def set_model_top(ds, top, min_thickness=0.0, copy=True): """Set the model top, changing layer bottoms when necessary as well. If the new top is higher than the previous top, the extra thickness is added to the @@ -676,24 +696,26 @@ def set_model_top(ds, top, min_thickness=0.0): The model dataset, containing the current top. top : xarray.DataArray The new model top, with the same shape as the current top. + min_thickness : float, optional + The minimum thickness of top layers. When the thickness is less than + min_thickness, the thicjness is added to a deeper layer. The default is 0.0. + copy : bool, optional + If copy=True, data in the return value is always copied, so the original Dataset + is not altered. The default is True. Returns ------- ds : xarray.Dataset The model dataset, containing the new top. """ - if "gridtype" not in ds.attrs: - raise ( - KeyError( - "Dataset does not have attribute 'gridtype'. " - "Either add attribute or use nlmod functions to build the dataset." 
- ) - ) if "layer" in ds["top"].dims: raise (ValueError("set_model_top does not support top with a layer dimension")) + if copy: + # make a copy, so we are sure we do not alter the original DataSet + ds = ds.copy(deep=True) if isinstance(top, (float, int)): top = xr.full_like(ds["top"], top) - if not top.shape == ds["top"].shape: + elif not top.shape == ds["top"].shape: raise ( ValueError("Please make sure the new top has the same shape as the old top") ) @@ -703,7 +725,7 @@ def set_model_top(ds, top, min_thickness=0.0): # set the botm to the new top at these locations ds["botm"] = ds["botm"].where(ds["botm"] != ds["top"], top) # make sure the botm is never higher than the new top - ds["botm"] = ds["botm"].where(top - ds["botm"] > min_thickness, top) + ds["botm"] = ds["botm"].where((top - ds["botm"]) > min_thickness, top) # change the current top ds["top"] = top # remove inactive layers @@ -711,11 +733,14 @@ def set_model_top(ds, top, min_thickness=0.0): return ds -def set_layer_top(ds, layer, top): +def set_layer_top(ds, layer, top, copy=True): """Set the top of a layer.""" assert layer in ds.layer if "layer" in ds["top"].dims: raise (ValueError("set_layer_top does not support top with a layer dimension")) + if copy: + # make a copy, so we are sure we do not alter the original DataSet + ds = ds.copy(deep=True) lay = np.where(ds.layer == layer)[0][0] if lay == 0: # change the top of the model @@ -736,27 +761,31 @@ def set_layer_top(ds, layer, top): return ds -def set_layer_botm(ds, layer, botm): +def set_layer_botm(ds, layer, botm, copy=True): """Set the bottom of a layer.""" assert layer in ds.layer if "layer" in ds["top"].dims: raise (ValueError("set_layer_botm does not support top with a layer dimension")) + if copy: + # make a copy, so we are sure we do not alter the original DataSet + ds = ds.copy(deep=True) lay = np.where(ds.layer == layer)[0][0] - # if lay > 0 and np.any(botm > ds["botm"][lay - 1]): - # raise (Exception("set_layer_botm cannot change botm of higher layers yet")) + # make sure the botm of the layers above is lever lower than the new botm ds["botm"][:lay] = ds["botm"][:lay].where(ds["botm"][:lay] > botm, botm) ds["botm"][lay] = botm # make sure the botm of the layers below is never higher than the new botm mask = ds["botm"][lay + 1 :] < botm ds["botm"][lay + 1 :] = ds["botm"][lay + 1 :].where(mask, botm) - # make sure the botm of the layers above is lever lower than the new botm return ds -def set_layer_thickness(ds, layer, thickness, change="botm"): +def set_layer_thickness(ds, layer, thickness, change="botm", copy=True): """Set the layer thickness by changing the bottom of the layer.""" assert layer in ds.layer assert change == "botm", "Only change=botm allowed for now" + if copy: + # make a copy, so we are sure we do not alter the original DataSet + ds = ds.copy(deep=True) lay = np.where(ds.layer == layer)[0][0] if lay == 0: top = ds["top"] @@ -767,11 +796,14 @@ def set_layer_thickness(ds, layer, thickness, change="botm"): return ds -def set_minimum_layer_thickness(ds, layer, min_thickness, change="botm"): +def set_minimum_layer_thickness(ds, layer, min_thickness, change="botm", copy=True): """Make sure layer has a minimum thickness by lowering the botm of the layer where neccesary.""" assert layer in ds.layer assert change == "botm", "Only change=botm allowed for now" + if copy: + # make a copy, so we are sure we do not alter the original DataSet + ds = ds.copy(deep=True) lay = np.where(ds.layer == layer)[0][0] if lay == 0: top = ds["top"] @@ -785,7 +817,7 @@ def 
set_minimum_layer_thickness(ds, layer, min_thickness, change="botm"): return ds -def remove_thin_layers(ds, min_thickness=0.1): +def remove_thin_layers(ds, min_thickness=0.1, copy=True): """Remove layers from cells with a thickness less than min_thickness The thickness of the removed cells is added to the first active layer below @@ -793,10 +825,13 @@ def remove_thin_layers(ds, min_thickness=0.1): if "layer" in ds["top"].dims: msg = "remove_thin_layers does not support top with a layer dimension" raise (ValueError(msg)) + if copy: + # make a copy, so we are sure we do not alter the original DataSet + ds = ds.copy(deep=True) thickness = calculate_thickness(ds) for lay_org in range(len(ds.layer)): # determine where the layer is too thin - mask = (thickness[lay_org] > 0) & (thickness[lay_org] < min_thickness) + mask = thickness[lay_org] < min_thickness if mask.any(): # we will set the botm to the top in these cells, so we first get the top if lay_org == 0: @@ -982,7 +1017,7 @@ def fill_nan_top_botm_kh_kv( """ # 1 - ds = fill_top_and_bottom(ds) + ds = remove_layer_dim_from_top(ds) # 2 if remove_nan_layers: @@ -1002,54 +1037,186 @@ def fill_nan_top_botm_kh_kv( return ds -def fill_top_and_bottom(ds, drop_layer_dim_from_top=True): +def fill_nan_top_botm(ds): """ - Remove Nans in botm variable, and change top from 3d to 2d if necessary. + Remove Nans in non-existent layers in botm and top variables + + The NaNs are removed by setting the value to the top and botm of higher/lower + layers that do exist. Parameters ---------- ds : xr.DataSet model DataSet - drop_layer_dim_from_top : bool, optional - If True and top contains a layer dimension, set top to the top of the upper - layer (line the definition in MODFLOW). This removes redundant data, as the top - of all layers exept the most upper one is also defined as the bottom of previous - layers. The default is True. Returns ------- ds : xarray.Dataset - dataset with filled top and bottom data according to modflow definition, - with 2d top and 3d bottom. + dataset with filled top and botm data """ - if "layer" in ds["top"].dims: top_max = ds["top"].max("layer") - if drop_layer_dim_from_top: - ds["top"] = top_max else: top_max = ds["top"] + # fill nans in botm of the first layer + ds["botm"][0] = ds["botm"][0].where(~ds["botm"][0].isnull(), top_max) + + # then use ffill to fill nans in deeper layers + ds["botm"] = ds["botm"].ffill("layer") - botm = ds["botm"].data - # remove nans from botm - for lay in range(botm.shape[0]): - mask = np.isnan(botm[lay]) - if lay == 0: - # by setting the botm to top_max - botm[lay, mask] = top_max.data[mask] - else: - # by setting the botm to the botm of the layer above - botm[lay, mask] = botm[lay - 1, mask] if "layer" in ds["top"].dims: # remove nans from top by setting it equal to botm # which sets the layer thickness to 0 - top = ds["top"].data - mask = np.isnan(top) - top[mask] = botm[mask] + ds["top"] = ds["top"].where(~ds["top"].isnull(), ds["botm"]) + return ds + + +def set_nan_top_and_botm(ds, copy=True): + """ + Sets Nans for non-existent layers in botm and top variables + + Nans are only added to top when it contains a layer dimension. + + Parameters + ---------- + ds : xr.DataSet + model DataSet + copy : bool, optional + If copy=True, data in the return value is always copied, so the original Dataset + is not altered. The default is True. + + Returns + ------- + ds : xarray.Dataset + dataset with nans in top and botm at non-existent layers. 
+ """ + if copy: + # make a copy, so we are sure we do not alter the original DataSet + ds = ds.copy(deep=True) + thickness = calculate_thickness(ds) + mask = thickness > 0 + if "layer" in ds["top"].dims: + ds["top"] = ds["top"].where(mask) + ds["botm"] = ds["botm"].where(mask) + return ds + +def remove_layer_dim_from_top( + ds, + check=True, + set_non_existing_layers_to_nan=False, + inconsistency_threshold=0.0, + return_inconsistencies=False, + copy=True, +): + """ + Change top from 3d to 2d, removing NaNs in top and botm in the process. + + This method sets variable `top` to the top of the upper layer (like the definition + in MODFLOW). This removes redundant data, as the top of all layers exept the most + upper one is also defined as the bottom of lower layers. + + Parameters + ---------- + ds : xr.DataSet + model DataSet + check : bool, optional + If True, checks for inconsistencies in the layer model and report to logger as + warning. The defaults is True. + set_non_existing_layers_to_nan bool, optional + If True, sets the value of the botm-variable to NaN for non-existent layers. + This is not recommended, as this might break some procedures in nlmod. The + default is False. + inconsistency_threshold : float, optional + The threshold, above which to report inconsistencies in the layer model (where + the botm of a layer is not equal to top of a deeper layer). The default is 0.0. + return_inconsistencies : bool, optional + if True, return the difference between the top of layers and the botm of a + deeper layer, at the location where inconsitencies have been reported. + copy : bool, optional + If copy=True, data in the return value is always copied, so the original Dataset + is not altered. The default is True. + + Returns + ------- + ds : xarray.Dataset or numpy.array + dataset without a layer dimension in top, if return_inconsistencies is False. If + return_inconsistencies is True, a numpy arrays with non-equal top and bottoms is + returned. + """ + if copy: + # make a copy, so we are sure we do not alter the original DataSet + ds = ds.copy(deep=True) + if "layer" in ds["top"].dims: + ds = fill_nan_top_botm(ds) + if check: + dz = ds["botm"][:-1].data - ds["top"][1:].data + inconsistencies = np.abs(dz) > inconsistency_threshold + n = inconsistencies.sum() + if n > 0: + msg = f"Botm of layer is not equal to top of deeper layer in {n} cells" + logger.warning(msg) + if return_inconsistencies: + return np.vstack( + ( + ds["botm"].data[:-1][inconsistencies], + ds["top"].data[1:][inconsistencies], + ) + ) + ds["top"] = ds["top"][0] + if set_non_existing_layers_to_nan: + ds = set_nan_top_and_botm(ds, copy=False) + return ds + + +def add_layer_dim_to_top(ds, set_non_existing_layers_to_nan=True, copy=True): + """ + Change top from 2d to 3d, setting top and botm to NaN for non-existent layers. + + Parameters + ---------- + ds : xr.DataSet + model DataSet + set_non_existing_layers_to_nan : bool, optional + If True, set the values of top and botm to . The default is True. + copy : bool, optional + If copy=True, data in the return value is always copied, so the original Dataset + is not altered. The default is True. + + Returns + ------- + ds : xarray.Dataset + dataset with a layer dimension in top. 
+ """ + if copy: + # make a copy, so we are sure we do not alter the original DataSet + ds = ds.copy(deep=True) + ds["top"] = ds["botm"] + calculate_thickness(ds) + if set_non_existing_layers_to_nan: + ds = set_nan_top_and_botm(ds, copy=False) return ds +def convert_to_modflow_top_bot(ds, **kwargs): + """ + Removes the layer dimension from top and fills nans in top and botm. + + Alias to remove_layer_dim_from_top + + """ + ds = remove_layer_dim_from_top(ds, **kwargs) + + +def convert_to_regis_top_bot(ds, **kwargs): + """ + Adds a layer dimension to top and sets non-existing cells to nan in top and botm. + + Alias to add_layer_dim_to_top + + """ + ds = add_layer_dim_to_top(ds, **kwargs) + + def remove_inactive_layers(ds): """ Remove layers which only contain inactive cells @@ -1379,20 +1546,17 @@ def insert_layer(ds, name, top, bot, kh=None, kv=None, copy=True): needs to be inserted above the existing layer, and if the top or bottom of the existing layer needs to be altered. - For now, this method needs a layer model with a 3d-top, like you get using the - method `nlmod.read.get_regis()`, and does not function for a model Dataset with a 2d - (structured) or 1d (vertex) top. - When comparing the height of the new layer with an existing layer, there are 7 options: 1 The new layer is entirely above the existing layer: layer is added completely above existing layer. When the bottom of the new layer is above the top of the existing layer (which can happen for the first layer), this creates a gap in the - layer model. + layer model. In the returned Dataset, this gap is added to the existing layer. - 2 part of the new layer lies within an existing layer, bottom is never below: layer - is added above the existing layer, and the top of existing layer is lowered. + 2 part of the new layer lies within an existing layer, but the bottom of the new + layer is never below the bottom of the existing layer: layer is added above the + existing layer, and the top of existing layer is lowered. 3 there are locations where the new layer is above the bottom of the existing layer, but below the top of the existing layer. The new layer splits the existing layer @@ -1441,8 +1605,14 @@ def insert_layer(ds, name, top, bot, kh=None, kv=None, copy=True): shape = ds["botm"].shape[1:] assert top.shape == shape assert bot.shape == shape - msg = "Inserting layers is only supported with a 3d top for now" - assert "layer" in ds["top"].dims, msg + if copy: + # make a copy, so we are sure we do not alter the original DataSet + ds = ds.copy(deep=True) + if "layer" in ds["top"].dims: + msg = "Top in ds has a layer dimension. insert_layer will remove the layer dimension from top in ds." 
+ logger.warning(msg) + else: + ds["top"] = ds["botm"] + calculate_thickness(ds) if kh is not None: assert kh.shape == shape if kv is not None: @@ -1450,9 +1620,6 @@ def insert_layer(ds, name, top, bot, kh=None, kv=None, copy=True): todo = ~(np.isnan(top.data) | np.isnan(bot.data)) & ((top - bot).data > 0) if not todo.any(): logger.warning(f"Thickness of new layer {name} is never larger than 0") - if copy: - # make a copy, so we are sure we do not alter the original DataSet - ds = ds.copy(deep=True) isplit = None for layer in ds.layer.data: if not todo.any(): @@ -1545,6 +1712,8 @@ def insert_layer(ds, name, top, bot, kh=None, kv=None, copy=True): if isplit is not None: isplit += 1 ds = _insert_layer_below(ds, None, name, isplit, mask, top, bot, kh, kv, copy) + # remove layer dimension from top again + ds = remove_layer_dim_from_top(ds, inconsistency_threshold=0.001) return ds @@ -1596,6 +1765,8 @@ def remove_layer(ds, layer): if layer not in layers: raise (KeyError(f"layer '{layer}' not present in Dataset")) if "layer" not in ds["top"].dims: + # make a copy, so we are sure we do not alter the original DataSet + ds = ds.copy(deep=True) index = layers.index(layer) if index == 0: # lower the top to the second layer diff --git a/nlmod/gwf/gwf.py b/nlmod/gwf/gwf.py index abc3d30e..a279b1f1 100644 --- a/nlmod/gwf/gwf.py +++ b/nlmod/gwf/gwf.py @@ -928,7 +928,7 @@ def buy(ds, gwf, pname="buy", **kwargs): ds, "denseref", attr="denseref", value=kwargs.pop("denseref", None) ) - pdata = [(0, drhodc, crhoref, f"{ds.model_name}_gwt", "none")] + pdata = [(0, drhodc, crhoref, f"{ds.model_name}_gwt", "CONCENTRATION")] buy = flopy.mf6.ModflowGwfbuy( gwf, diff --git a/nlmod/gwf/horizontal_flow_barrier.py b/nlmod/gwf/horizontal_flow_barrier.py index d6e9b557..0c6cc07a 100644 --- a/nlmod/gwf/horizontal_flow_barrier.py +++ b/nlmod/gwf/horizontal_flow_barrier.py @@ -46,6 +46,9 @@ def get_hfb_spd(gwf, linestrings, hydchr=1 / 100, depth=None, elevation=None): cells = line2hfb(linestrings, gwf) + # drop cells on the edge of the model + cells = [x for x in cells if len(x) > 1] + spd = [] # hydchr = 1 / 100 # resistance of 100 days diff --git a/nlmod/plot/__init__.py b/nlmod/plot/__init__.py index a646702b..ffeb6bd0 100644 --- a/nlmod/plot/__init__.py +++ b/nlmod/plot/__init__.py @@ -5,6 +5,7 @@ data_array, facet_plot, geotop_lithok_in_cross_section, + geotop_lithok_on_map, map_array, modelgrid, surface_water, diff --git a/nlmod/plot/flopy.py b/nlmod/plot/flopy.py index b6be4b43..0efc2f90 100644 --- a/nlmod/plot/flopy.py +++ b/nlmod/plot/flopy.py @@ -450,6 +450,8 @@ def facet_plot( if iper is None: raise ValueError("Pass 'period' to select timestep to plot.") a = arr[iper] + else: + a = arr elif plot_dim == "time": ilay = layer iper = i @@ -457,6 +459,8 @@ def facet_plot( if ilay is None: raise ValueError("Pass 'layer' to select layer to plot.") a = arr[iper] + else: + a = arr else: raise ValueError("'plot_dim' must be one of ['layer', 'time']") diff --git a/nlmod/plot/plot.py b/nlmod/plot/plot.py index bfec9cf2..176dc81b 100644 --- a/nlmod/plot/plot.py +++ b/nlmod/plot/plot.py @@ -2,6 +2,7 @@ from functools import partial import flopy as fp +import xarray as xr import matplotlib.pyplot as plt import numpy as np import pandas as pd @@ -203,7 +204,7 @@ def data_array(da, ds=None, ax=None, rotated=False, edgecolor=None, **kwargs): y = da.y if rotated: if ds is None: - raise (Exception("Supply model dataset (ds) for grid information")) + raise (ValueError("Supply model dataset (ds) for grid information")) if "angrot" in 
ds.attrs and ds.attrs["angrot"] != 0.0: affine = get_affine_mod_to_world(ds) x, y = affine * np.meshgrid(x, y) @@ -223,7 +224,7 @@ def geotop_lithok_in_cross_section( The voxel-dataset from GeoTOP. It is downloaded with the method `nlmod.read.geotop.get_geotop()` if None. The default is None. ax : matplotlib.Axes, optional - The axes in whcih the cross-section is plotted. Will default to the current axes + The axes in which the cross-section is plotted. Will default to the current axes if None. The default is None. legend : bool, optional When True, add a legend to the plot with the lithology-classes. The default is @@ -259,29 +260,97 @@ def geotop_lithok_in_cross_section( lithok_props = geotop.get_lithok_props() cs = DatasetCrossSection(gt, line, layer="z", ax=ax, **kwargs) - lithoks = gt["lithok"].values - lithok_un = np.unique(lithoks[~np.isnan(lithoks)]) - array = np.full(lithoks.shape, np.NaN) - - colors = [] - for i, lithok in enumerate(lithok_un): - lithok = int(lithok) - array[lithoks == lithok] = i - colors.append(lithok_props.at[lithok, "color"]) - cmap = ListedColormap(colors) - norm = Normalize(-0.5, np.nanmax(array) + 0.5) + array, cmap, norm = _get_geotop_cmap_and_norm(gt["lithok"], lithok_props) cs.plot_array(array, norm=norm, cmap=cmap) if legend: # make a legend with dummy handles - handles = [] - for i, lithok in enumerate(lithok_un): - label = lithok_props.at[int(lithok), "name"] - handles.append(Patch(facecolor=colors[i], label=label)) - ax.legend(handles=handles, loc=legend_loc) + _add_geotop_lithok_legend(lithok_props, ax, lithok=gt["lithok"], loc=legend_loc) return cs +def geotop_lithok_on_map( + gt, z=None, ax=None, legend=True, legend_loc=None, lithok_props=None, **kwargs +): + """PLot the lithoclass-data of GeoTOP on a map. + + Parameters + ---------- + gt : xr.Dataset or xr.DataArray + The voxel-dataset from GeoTOP or the lithok-DataArray from GeoTOP. + z : float, optional + The z-value of the lithok-data to plot on the map. + ax : matplotlib.Axes, optional + The axes in which the array is plotted. Will default to the current axes if + None. The default is None. + legend : bool, optional + When True, add a legend to the plot with the lithology-classes. The default is + True. + legend_loc : None or str, optional + The location of the legend. See matplotlib documentation. The default is None. + lithok_props : pd.DataFrame, optional + A DataFrame containing the properties of the lithoclasses. + Will call nlmod.read.geotop.get_lithok_props() when None. The default is None. + + **kwargs : dict + kwargs are passed onto ax.pcolormesh. 
+ + Returns + ------- + qm : matplotlib.collections.QuadMesh + + """ + if ax is None: + ax = plt.gca() + if isinstance(gt, xr.Dataset): + lithok = gt["lithok"] + else: + lithok = gt + if "z" in lithok.dims: + if z is None: + raise (ValueError("Select a depth (z) in the GeoTOP data first")) + lithok = lithok.sel(z=z, method="nearest") + if lithok_props is None: + lithok_props = geotop.get_lithok_props() + + array, cmap, norm = _get_geotop_cmap_and_norm(lithok, lithok_props) + qm = ax.pcolormesh(lithok.x, lithok.y, array, norm=norm, cmap=cmap, **kwargs) + if legend: + # make a legend with dummy handles + _add_geotop_lithok_legend(lithok_props, ax, lithok=lithok, loc=legend_loc) + return qm + + +def _add_geotop_lithok_legend(lithok_props, ax, lithok=None, **kwargs): + """Add a legend with lithok-data""" + handles = [] + if lithok is None: + lithoks = lithok_props.index + else: + lithoks = np.unique(lithok) + lithoks = lithoks[~np.isnan(lithoks)] + for index in lithoks: + color = lithok_props.at[index, "color"] + label = lithok_props.at[int(index), "name"] + handles.append(Patch(facecolor=color, label=label)) + return ax.legend(handles=handles, **kwargs) + + +def _get_geotop_cmap_and_norm(lithok, lithok_props): + """Get an array of lithok-values, with a corresponding colormap and norm""" + lithok_un = np.unique(lithok) + lithok_un = lithok_un[~np.isnan(lithok_un)] + array = np.full(lithok.shape, np.NaN) + colors = [] + for i, ilithok in enumerate(lithok_un): + ilithok = int(ilithok) + array[lithok == ilithok] = i + colors.append(lithok_props.at[ilithok, "color"]) + cmap = ListedColormap(colors) + norm = Normalize(-0.5, np.nanmax(array) + 0.5) + return array, cmap, norm + + def _get_figure(ax=None, da=None, ds=None, figsize=None, rotated=True, extent=None): # figure if ax is not None: diff --git a/nlmod/read/geotop.py b/nlmod/read/geotop.py index d4e2e425..45ff54ca 100644 --- a/nlmod/read/geotop.py +++ b/nlmod/read/geotop.py @@ -8,7 +8,7 @@ import xarray as xr from .. import NLMOD_DATADIR, cache -from ..dims.layers import insert_layer, remove_layer +from ..dims.layers import insert_layer, remove_layer, remove_layer_dim_from_top from ..util import MissingValueError logger = logging.getLogger(__name__) @@ -61,7 +61,11 @@ def get_kh_kv_table(kind="Brabant"): @cache.cache_netcdf def to_model_layers( - geotop_ds, strat_props=None, method_geulen="add_to_layer_below", **kwargs + geotop_ds, + strat_props=None, + method_geulen="add_to_layer_below", + drop_layer_dim_from_top=True, + **kwargs, ): """Convert geotop voxel dataset to layered dataset. @@ -86,6 +90,11 @@ def to_model_layers( layers, which can fail if a 'geul' is locally both below the top and above the bottom of another layer (splitting the layer in two, which is not supported). The default is "add_to_layer_below". + drop_layer_dim_from_top : bool, optional + When True, fill NaN values in top and botm and drop the layer dimension from + top. This will transform top and botm to the data model in MODFLOW. An advantage + of this data model is that the layer model is consistent by definition, with no + possibilities of gaps between layers. The default is True. kwargs : dict Kwargs are passed to `aggregate_to_ds()` @@ -205,6 +214,9 @@ def to_model_layers( ds.attrs["geulen"] = geulen + if drop_layer_dim_from_top: + ds = remove_layer_dim_from_top(ds) + if "kh" in geotop_ds and "kv" in geotop_ds: aggregate_to_ds(geotop_ds, ds, **kwargs) @@ -235,7 +247,7 @@ def get_geotop(extent, url=GEOTOP_URL, probabilities=False): url of geotop netcdf file. 
The default is http://www.dinodata.nl/opendap/GeoTOP/geotop.nc probabilities : bool, optional - if True, download probability data + if True, also download probability data. The default is False. Returns ------- diff --git a/nlmod/read/regis.py b/nlmod/read/regis.py index f22419be..05f21227 100644 --- a/nlmod/read/regis.py +++ b/nlmod/read/regis.py @@ -7,7 +7,7 @@ import xarray as xr from .. import cache -from ..dims.layers import calculate_thickness +from ..dims.layers import calculate_thickness, remove_layer_dim_from_top from . import geotop logger = logging.getLogger(__name__) @@ -99,6 +99,8 @@ def get_regis( botm_layer="AKc", variables=("top", "botm", "kh", "kv"), remove_nan_layers=True, + drop_layer_dim_from_top=True, + probabilities=False, ): """get a regis dataset projected on the modelgrid. @@ -116,8 +118,15 @@ def get_regis( entries in the list are 'top', 'botm', 'kD', 'c', 'kh', 'kv', 'sdh' and 'sdv'. The default is ("top", "botm", "kh", "kv"). remove_nan_layers : bool, optional - When True, layers which contain only NaNs for the botm array are removed. - The default is True. + When True, layers that do not occur in the requested extent (layers that contain + only NaN values for the botm array) are removed. The default is True. + drop_layer_dim_from_top : bool, optional + When True, fill NaN values in top and botm and drop the layer dimension from + top. This will transform top and botm to the data model in MODFLOW. An advantage + of this data model is that the layer model is consistent by definition, with no + possibilities of gaps between layers. The default is True. + probabilities : bool, optional + if True, also download probability data. The default is False. Returns ------- @@ -159,8 +168,14 @@ def get_regis( msg = "No data found. Please supply valid extent in the Netherlands in RD-coordinates" raise (Exception(msg)) + if drop_layer_dim_from_top: + ds = remove_layer_dim_from_top(ds) + # slice data vars - ds = ds[list(variables)] + if variables is not None: + if probabilities: + variables = variables + ("sdh", "sdv") + ds = ds[list(variables)] ds.attrs["extent"] = extent for datavar in ds: @@ -228,13 +243,21 @@ def add_geotop_to_regis_layers( geotop_k = geotop.get_lithok_props() gt = geotop.add_kh_and_kv(gt, geotop_k, anisotropy=anisotropy) + # copy the regis-dataset, before altering it + rg = rg.copy(deep=True) + if "layer" in rg["top"].dims: + msg = "Top in rg has a layer dimension. add_geotop_to_regis_layers will remove the layer dimension from top in rg." 
+ logger.warning(msg) + else: + # temporarily add layer dimension to top in rg + rg["top"] = rg["botm"] + calculate_thickness(rg) + for layer in layers: # transform geotop data into layers gtl = geotop.to_model_layers(gt) - # make sure top is 3d - assert "layer" in rg["top"].dims, "Top of regis must be 3d" - assert "layer" in gtl["top"].dims, "Top of geotop layers must be 3d" + # temporarily add layer dimension to top in gtl + gtl["top"] = gtl["botm"] + calculate_thickness(gtl) # only keep the part of layers inside the regis layer top = rg["top"].loc[layer] @@ -260,6 +283,9 @@ def add_geotop_to_regis_layers( rg = xr.concat((rg, gtl), "layer") # we will then make sure the layer order is right rg = rg.reindex({"layer": layer_order}) + + # remove the layer dimension from top again + rg = remove_layer_dim_from_top(rg) return rg diff --git a/nlmod/read/waterboard.py b/nlmod/read/waterboard.py index 3434cb76..ff30970d 100644 --- a/nlmod/read/waterboard.py +++ b/nlmod/read/waterboard.py @@ -183,7 +183,8 @@ def get_configuration(): # "url": "https://gis.wetterskipfryslan.nl/arcgis/rest/services/Peilbelsuit_Friese_boezem/MapServer", # "index": "BLAEU_WFG_GPG_BEHEER_PBHIDENT", "url": "https://gis.wetterskipfryslan.nl/arcgis/rest/services/Peilen/MapServer", - "layer": 1, # PeilenPeilenbeheerkaart - Peilen + # "layer": 1, # PeilenPeilenbeheerkaart - Peilen + "layer": 4, # Peilgebied praktijk "index": "PBHIDENT", # "layer": 4, # Peilbesluitenkaart # "index": "GPGIDENT", @@ -270,12 +271,17 @@ def get_configuration(): config["Noorderzijlvest"] = { "bgt_code": "W0647", "watercourses": { - "url": "https://arcgis.noorderzijlvest.nl/server/rest/services/Legger/Legger_Watergangen_2012/MapServer", + # "url": "https://arcgis.noorderzijlvest.nl/server/rest/services/Legger/Legger_Watergangen_2012/MapServer", + "url": "https://arcgis.noorderzijlvest.nl/server/rest/services/Watergangen/Watergangen/MapServer", + "layer": 1, # primair + # "layer": 2, # secundair + "bottom_height": [["IWS_AVVHOBOS_L", "IWS_AVVHOBES_L"]], + "bottom_width": "AVVBODDR", "index": "OVKIDENT", }, "level_areas": { - "url": "https://arcgis.noorderzijlvest.nl/server/rest/services/Peilbeheer/Peilgebieden/MapServer", - "layer": 3, + "url": "https://arcgis.noorderzijlvest.nl/server/rest/services/VenH/Peilgebieden/MapServer", + "layer": 2, "index": "GPGIDENT", "summer_stage": "OPVAFWZP", "winter_stage": "OPVAFWWP", diff --git a/nlmod/util.py b/nlmod/util.py index 4abf90b8..a2c1c72b 100644 --- a/nlmod/util.py +++ b/nlmod/util.py @@ -461,11 +461,11 @@ def download_modpath_provisional_exe(bindir=None, timeout=120): if not os.path.isdir(bindir): os.makedirs(bindir) if sys.platform.startswith("win"): - fname = "mp7_win64_20230911_8eca8d8.exe" + fname = "mp7_win64_20231016_86b38df.exe" elif sys.platform.startswith("darwin"): - fname = "mp7_mac_20230911_8eca8d8" + fname = "mp7_mac_20231016_86b38df" elif sys.platform.startswith("linux"): - fname = "mp7_linux_20230911_8eca8d8" + fname = "mp7_linux_20231016_86b38df" else: raise (OSError(f"Unknown platform: {sys.platform}")) url = "https://github.com/MODFLOW-USGS/modpath-v7/raw/develop/msvs/bin_PROVISIONAL" diff --git a/tests/test_002_regis_geotop.py b/tests/test_002_regis_geotop.py index 423593bb..81bf1bce 100644 --- a/tests/test_002_regis_geotop.py +++ b/tests/test_002_regis_geotop.py @@ -1,3 +1,4 @@ +import matplotlib.pyplot as plt import nlmod @@ -21,8 +22,13 @@ def test_get_regis_botm_layer_BEk1( def test_get_geotop(extent=[98600.0, 99000.0, 489400.0, 489700.0]): geotop_ds = 
nlmod.read.geotop.get_geotop(extent) line = [(extent[0], extent[2]), (extent[1], extent[3])] - # also test the plot-method - nlmod.plot.geotop_lithok_in_cross_section(line, geotop_ds) + + # also test the plot-methods + f, ax = plt.subplots() + nlmod.plot.geotop_lithok_in_cross_section(line, geotop_ds, ax=ax) + + f, ax = plt.subplots() + nlmod.plot.geotop_lithok_on_map(geotop_ds, z=-20.2, ax=ax) # @pytest.mark.skip(reason="too slow") diff --git a/tests/test_008_waterschappen.py b/tests/test_008_waterschappen.py index bd9a6a0b..4c15cad4 100644 --- a/tests/test_008_waterschappen.py +++ b/tests/test_008_waterschappen.py @@ -49,8 +49,11 @@ def test_download_peilgebieden(plot=True): cmap = "viridis" for wb in waterboards.index: if wb in gdf: - # gdf[wb].plot(ax=ax, zorder=0) - gdf[wb].plot("winter_stage", ax=ax, zorder=0, norm=norm, cmap=cmap) + try: + # gdf[wb].plot(ax=ax, zorder=0) + gdf[wb].plot("winter_stage", ax=ax, zorder=0, norm=norm, cmap=cmap) + except Exception as e: + print(f"plotting of {data_kind} for {wb} failed: {e}") c = waterboards.at[wb, "geometry"].centroid ax.text(c.x, c.y, wb.replace(" ", "\n"), ha="center", va="center") nlmod.plot.colorbar_inside( diff --git a/tests/test_009_layers.py b/tests/test_009_layers.py index ac1d44b6..ba6d359d 100644 --- a/tests/test_009_layers.py +++ b/tests/test_009_layers.py @@ -1,7 +1,7 @@ import os -import matplotlib.pyplot as plt import numpy as np +import matplotlib.pyplot as plt from shapely.geometry import LineString import nlmod @@ -37,14 +37,14 @@ def compare_layer_models( else: fig, (ax1, ax2) = plt.subplots(2, 1, figsize=(14, 12), sharex=True) dcs1 = DatasetCrossSection(ds1, line=line, ax=ax1, zmin=zmin, zmax=zmax) - polys2 = dcs1.plot_layers(colors=colors, min_label_area=min_label_area) + dcs1.plot_layers(colors=colors, min_label_area=min_label_area) dcs1.plot_grid(linewidth=0.5, vertical=False) ax1.set_ylabel(ylabel) if ds2 is not None: ax1.set_title(title1) dcs2 = DatasetCrossSection(ds2, line=line, ax=ax2, zmin=zmin, zmax=zmax) - polys1 = dcs2.plot_layers(colors=colors, min_label_area=min_label_area) + dcs2.plot_layers(colors=colors, min_label_area=min_label_area) dcs2.plot_grid(linewidth=0.5, vertical=False) ax2.set_ylabel(ylabel) ax2.set_xlabel(xlabel) @@ -53,76 +53,147 @@ def compare_layer_models( ax1.set_xlabel(xlabel) -def plot_test(ds, ds_new): - line = LineString([(ds.extent[0], ds.extent[2]), (ds.extent[1], ds.extent[3])]) - colors = nlmod.read.regis.get_legend()["color"].to_dict() +def plot_test(ds, ds_new, line=None, colors=None): + if line is None: + line = LineString([(ds.extent[0], ds.extent[2]), (ds.extent[1], ds.extent[3])]) + if colors is None: + colors = nlmod.read.regis.get_legend()["color"].to_dict() compare_layer_models(ds, line, colors, ds_new) +def test_split_layers(plot=False): + regis = get_regis_horstermeer() + split_dict = {"PZWAz2": (0.3, 0.3, 0.4), "PZWAz3": (0.2, 0.2, 0.2, 0.2, 0.2)} + regis_split, split_reindexer = nlmod.layers.split_layers_ds( + regis, split_dict, return_reindexer=True + ) + + th_new = nlmod.layers.calculate_thickness(regis_split) + assert (th_new >= 0).all() + # make sure the total thickness of the two models is equal (does not assert to True yet) + # assert th_new.sum() == nlmod.layers.calculate_thickness(regis).sum() + + if plot: + colors = nlmod.read.regis.get_legend()["color"].to_dict() + for j, i in split_reindexer.items(): + if j not in colors: + colors[j] = colors[i] + plot_test(regis, regis_split, colors=colors) + + +def test_add_layer_dim_to_top(): + regis = 
get_regis_horstermeer() + ds = nlmod.layers.add_layer_dim_to_top(regis) + assert "layer" in ds["top"].dims + assert ds["botm"].isnull().any() + + +def test_combine_layers(plot=False): + regis = get_regis_horstermeer() + combine_layers = [ + tuple(np.argwhere(regis.layer.str.startswith("URz").data).squeeze().tolist()), + tuple( + np.argwhere(regis.layer.isin(["PZWAz2", "PZWAz3"]).data).squeeze().tolist() + ), + ] + regis_combined = nlmod.layers.combine_layers_ds( + regis, combine_layers, kD=None, c=None + ) + + th_new = nlmod.layers.calculate_thickness(regis_combined) + assert (th_new >= 0).all() + + if plot: + plot_test(regis, regis_combined) + + def test_set_layer_thickness(plot=False): regis = get_regis_horstermeer() - ds = nlmod.to_model_ds(regis) + ds_new = nlmod.layers.set_layer_thickness(regis.copy(deep=True), "WAk1", 10.0) - ds_new = nlmod.layers.set_layer_thickness(ds.copy(deep=True), "WAk1", 10.0) - th = nlmod.layers.calculate_thickness(ds_new) - assert (th >= 0).all() - # assert (th.sel(layer="WAk1") == 10.0).all() + th_new = nlmod.layers.calculate_thickness(ds_new) + assert (np.abs(th_new.sel(layer="WAk1") - 10.0) < 0.001).all() + assert (th_new >= 0).all() + assert th_new.sum() == nlmod.layers.calculate_thickness(ds_new).sum() if plot: - plot_test(ds, ds_new) + plot_test(regis, ds_new) def test_set_minimum_layer_thickness(plot=False): regis = get_regis_horstermeer() - ds = nlmod.to_model_ds(regis) + ds_new = nlmod.layers.set_minimum_layer_thickness( + regis.copy(deep=True), "WAk1", 5.0 + ) - ds_new = nlmod.layers.set_minimum_layer_thickness(ds.copy(deep=True), "WAk1", 5.0) - th = nlmod.layers.calculate_thickness(ds_new) - assert (th >= 0).all() - # assert (th.sel(layer="WAk1") == 10.0).all() + th_new = nlmod.layers.calculate_thickness(ds_new) + assert (th_new.sel(layer="WAk1") > 5.0 - 0.001).all() + assert (th_new >= 0).all() + assert th_new.sum() == nlmod.layers.calculate_thickness(ds_new).sum() if plot: - plot_test(ds, ds_new) + plot_test(regis, ds_new) -def test_set_layer_top(plot=False): +def test_set_model_top(plot=False): regis = get_regis_horstermeer() - ds = nlmod.to_model_ds(regis) + ds_new = nlmod.layers.set_model_top(regis.copy(deep=True), 5.0) + + assert (ds_new["top"] == 5.0).all() + th_new = nlmod.layers.calculate_thickness(ds_new) + assert (th_new >= 0).all() + if plot: + plot_test(regis, ds_new) + - ds_new = nlmod.layers.set_layer_top(ds.copy(deep=True), "WAk1", -40.0) - assert (nlmod.layers.calculate_thickness(ds_new) >= 0).all() +def test_set_layer_top(plot=False): + regis = get_regis_horstermeer() + ds_new = nlmod.layers.set_layer_top(regis.copy(deep=True), "WAk1", -40.0) + th_new = nlmod.layers.calculate_thickness(ds_new) + top_new = ds_new["botm"].loc["WAk1"] + th_new.loc["WAk1"] + assert (top_new == -40).all() + assert (th_new >= 0).all() + assert th_new.sum() == nlmod.layers.calculate_thickness(ds_new).sum() if plot: - plot_test(ds, ds_new) + plot_test(regis, ds_new) def test_set_layer_botm(plot=False): regis = get_regis_horstermeer() - ds = nlmod.to_model_ds(regis) - - ds_new = nlmod.layers.set_layer_botm(ds.copy(deep=True), "WAk1", -75.0) - assert (nlmod.layers.calculate_thickness(ds_new) >= 0).all() + ds_new = nlmod.layers.set_layer_botm(regis.copy(deep=True), "WAk1", -75.0) + th_new = nlmod.layers.calculate_thickness(ds_new) + assert (ds_new["botm"].loc["WAk1"] == -75).all() + assert (th_new >= 0).all() + assert th_new.sum() == nlmod.layers.calculate_thickness(ds_new).sum() if plot: - plot_test(ds, ds_new) + plot_test(regis, ds_new) def 
test_insert_layer(): + # download regis ds1 = get_regis_horstermeer() + + # just replace the 2nd layer by a new insertion layer = ds1.layer.data[1] new_layer = "test" + ds1_top3d = ds1["botm"] + nlmod.layers.calculate_thickness(ds1) ds2 = nlmod.layers.insert_layer( - ds1, "test", ds1["top"].loc[layer], ds1["botm"].loc[layer] + ds1, "test", ds1_top3d.loc[layer], ds1["botm"].loc[layer] ) + + # make sure the total thickness of the two models is equal (does not assert to True yet) # total_thickness1 = float(nlmod.layers.calculate_thickness(ds1).sum()) # total_thickness2 = float(nlmod.layers.calculate_thickness(ds2).sum()) # assert total_thickness1 == total_thickness2 + + # make sure the original layer has no thickness left assert nlmod.layers.calculate_thickness(ds2).loc[layer].sum() == 0 - mask = ~np.isnan(ds1["top"].loc[layer]) - assert ( - ds2["top"].loc[new_layer].data[mask] == ds1["top"].loc[layer].data[mask] - ).all() - assert ( - ds2["botm"].loc[new_layer].data[mask] == ds1["botm"].loc[layer].data[mask] - ).all() + + # make sure the top of the new layer is equal to the top of the original layer + ds2_top3d = ds2["botm"] + nlmod.layers.calculate_thickness(ds2) + assert (ds2_top3d.loc[new_layer] == ds1_top3d.loc[layer]).all() + + # make sure the botm of the new layer is equal to the botm of the original layer + assert (ds2["botm"].loc[new_layer] == ds1["botm"].loc[layer]).all() diff --git a/tests/test_012_plot.py index 23d14271..7dd08e80 100644 --- a/tests/test_012_plot.py +++ b/tests/test_012_plot.py @@ -29,3 +29,26 @@ def test_plot_get_map(): nlmod.plot.get_map([100000, 101000, 400000, 401000], background=True, figsize=3) + + +def test_map_array(): + ds = util.get_ds_structured() + gwf = util.get_gwf(ds) + nlmod.plot.flopy.map_array(ds["kh"], gwf, ilay=0) + + +def test_flopy_contour_array(): + ds = util.get_ds_structured() + gwf = util.get_gwf(ds) + nlmod.plot.flopy.contour_array(ds["kh"], gwf, ilay=0) + + +def test_flopy_animate_map(): + # no test implemented yet + pass + + +def test_flopy_facet_plot(): + ds = util.get_ds_structured() + gwf = util.get_gwf(ds) + nlmod.plot.flopy.facet_plot(gwf, ds["kh"]) diff --git a/tests/test_013_surface_water.py index d05ed8d0..aeca6233 100644 --- a/tests/test_013_surface_water.py +++ b/tests/test_013_surface_water.py @@ -9,9 +9,8 @@ def test_gdf_to_seasonal_pkg(): model_name = "sw" model_ws = os.path.join("data", model_name) - ds = nlmod.get_ds( - [170000, 171000, 550000, 551000], model_ws=model_ws, model_name=model_name - ) + extent = [119000, 120000, 523000, 524000] + ds = nlmod.get_ds(extent, model_ws=model_ws, model_name=model_name) ds = nlmod.time.set_ds_time(ds, time=[365.0], start=pd.Timestamp.today()) gdf = nlmod.gwf.surface_water.get_gdf(ds) diff --git a/tests/test_022_gwt.py new file mode 100644 index 00000000..aaa017b2 --- /dev/null +++ b/tests/test_022_gwt.py @@ -0,0 +1,8 @@ +import nlmod + + +def test_prepare(): + ds = nlmod.get_ds([0, 1000, 0, 2000]) + ds = nlmod.gwt.prepare.set_default_transport_parameters( + ds, transport_type="chloride" + ) diff --git a/tests/test_023_hfb.py new file mode 100644 index 00000000..f7e2a73f --- /dev/null +++ b/tests/test_023_hfb.py @@ -0,0 +1,37 @@ +from shapely.geometry import LineString, Polygon +import geopandas as gpd +import flopy +import nlmod +import util + + +def test_get_hfb_spd(): + # this test also tests line2hfb + ds = util.get_ds_vertex() + ds = 
nlmod.time.set_ds_time(ds, "2023", time="2024") + gwf = util.get_gwf(ds) + + coords = [(0, 1000), (1000, 0)] + gdf = gpd.GeoDataFrame({"geometry": [LineString(coords)]}) + + spd = nlmod.gwf.horizontal_flow_barrier.get_hfb_spd(gwf, gdf, depth=5.0) + hfb = flopy.mf6.ModflowGwfhfb(gwf, stress_period_data={0: spd}) + + # also test the plot method + ax = gdf.plot() + nlmod.gwf.horizontal_flow_barrier.plot_hfb(hfb, gwf, ax=ax) + + +def test_polygon_to_hfb(): + ds = util.get_ds_vertex() + ds = nlmod.time.set_ds_time(ds, "2023", time="2024") + gwf = util.get_gwf(ds) + + coords = [(135, 230), (568, 170), (778, 670), (260, 786)] + gdf = gpd.GeoDataFrame({"geometry": [Polygon(coords)]}).reset_index() + + hfb = nlmod.gwf.horizontal_flow_barrier.polygon_to_hfb(gdf, ds, gwf=gwf) + + # also test the plot method + ax = gdf.plot() + nlmod.gwf.horizontal_flow_barrier.plot_hfb(hfb, gwf, ax=ax) diff --git a/tests/util.py b/tests/util.py index 67957a42..3d2c3265 100644 --- a/tests/util.py +++ b/tests/util.py @@ -21,3 +21,12 @@ def get_ds_vertex(extent=None, line=None, **kwargs): refinement_features = [([LineString(line)], "line", 1)] ds = nlmod.grid.refine(ds, model_ws, refinement_features=refinement_features) return ds + + +def get_gwf(ds): + sim = nlmod.sim.sim(ds) + if "time" in ds.variables: + nlmod.sim.tdis(ds, sim) + gwf = nlmod.gwf.gwf(ds, sim) + nlmod.gwf.dis(ds, gwf) + return gwf
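
The changes above move nlmod from a REGIS-style layer model (a `top` per layer) to a MODFLOW-style layer model (a single 2d `top`, with `botm` defining all other interfaces) and add helpers to convert between the two. The sketch below shows how the new convention might be exercised end-to-end; it is illustrative only: the extent is an arbitrary example, and it assumes `remove_layer_dim_from_top` is re-exported under `nlmod.layers`, like `add_layer_dim_to_top` and `calculate_thickness` are in the new tests.

# Illustrative sketch (not part of the diff): round-trip between the 2d
# (MODFLOW-style) and 3d (REGIS-style) top using the helpers added above.
# Assumes remove_layer_dim_from_top is exposed as nlmod.layers.remove_layer_dim_from_top.
import matplotlib.pyplot as plt
import nlmod

extent = [98000.0, 99000.0, 489000.0, 490000.0]  # example extent in RD coordinates

# get_regis now returns a 2d top by default (drop_layer_dim_from_top=True)
regis = nlmod.read.regis.get_regis(extent)
assert "layer" not in regis["top"].dims

# add the layer dimension back; non-existent layers get NaN in top and botm
regis_3d = nlmod.layers.add_layer_dim_to_top(regis)
assert "layer" in regis_3d["top"].dims

# remove it again; with check=True, inconsistencies between the botm of a layer
# and the top of the next layer are reported to the logger
regis_2d = nlmod.layers.remove_layer_dim_from_top(regis_3d)

# sanity check: the total thickness should survive the round-trip
th0 = nlmod.layers.calculate_thickness(regis)
th1 = nlmod.layers.calculate_thickness(regis_2d)
print("difference in total thickness:", float(th0.sum() - th1.sum()))

# the new map plot for GeoTOP lithoclasses, exported via nlmod.plot
gt = nlmod.read.geotop.get_geotop(extent)
f, ax = plt.subplots()
nlmod.plot.geotop_lithok_on_map(gt, z=-20.2, ax=ax)
plt.show()

Keeping a single 2d top removes the possibility of gaps or overlaps between layers, which is why functions such as split_layers_ds, combine_layers_ds and insert_layer now add a temporary layer dimension internally and strip it again before returning.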