Buisdrainage #276

Merged
merged 13 commits on Oct 23, 2023
9 changes: 9 additions & 0 deletions nlmod/dims/layers.py
@@ -689,6 +689,8 @@ def set_model_top(ds, top, min_thickness=0.0):
"Either add attribute or use nlmod functions to build the dataset."
)
)
if "layer" in ds["top"].dims:
raise (ValueError("set_model_top does not support top with a layer dimension"))
if isinstance(top, (float, int)):
top = xr.full_like(ds["top"], top)
if not top.shape == ds["top"].shape:
@@ -712,6 +714,8 @@ def set_model_top(ds, top, min_thickness=0.0):
def set_layer_top(ds, layer, top):
"""Set the top of a layer."""
assert layer in ds.layer
if "layer" in ds["top"].dims:
raise (ValueError("set_layer_top does not support top with a layer dimension"))
lay = np.where(ds.layer == layer)[0][0]
if lay == 0:
# change the top of the model
@@ -735,6 +739,8 @@ def set_layer_top(ds, layer, top):
def set_layer_botm(ds, layer, botm):
"""Set the bottom of a layer."""
assert layer in ds.layer
if "layer" in ds["top"].dims:
raise (ValueError("set_layer_botm does not support top with a layer dimension"))
lay = np.where(ds.layer == layer)[0][0]
# if lay > 0 and np.any(botm > ds["botm"][lay - 1]):
# raise (Exception("set_layer_botm cannot change botm of higher layers yet"))
@@ -784,6 +790,9 @@ def remove_thin_layers(ds, min_thickness=0.1):

The thickness of the removed cells is added to the first active layer below
"""
if "layer" in ds["top"].dims:
msg = "remove_thin_layers does not support top with a layer dimension"
raise (ValueError(msg))
thickness = calculate_thickness(ds)
for lay_org in range(len(ds.layer)):
# determine where the layer is too thin
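
The new guard above ties into the docstring note that the thickness of removed cells is added to the first active layer below. A minimal, standalone illustration of that idea (plain NumPy, not nlmod's actual implementation):

```python
import numpy as np

# One cell with a model top of 0.0 m and three layer bottoms; layer 0 is only
# 0.05 m thick, below an assumed min_thickness of 0.1 m.
top = 0.0
botm = np.array([-0.05, -1.0, -2.0])

thickness = -np.diff(np.concatenate(([top], botm)))
print(thickness)  # [0.05 0.95 1.  ]

# "Removing" the thin layer: its bottom is moved up to the model top, so layer 0
# ends up with zero thickness and layer 1 (the first active layer below) now
# runs from 0.0 to -1.0 m, absorbing the removed 0.05 m.
if thickness[0] < 0.1:
    botm[0] = top
print(-np.diff(np.concatenate(([top], botm))))  # [0. 1. 1.]
```
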
4 changes: 2 additions & 2 deletions nlmod/dims/resample.py
@@ -446,7 +446,7 @@ def structured_da_to_ds(da, ds, method="average", nodata=np.NaN):
Parameters
----------
da : xarray.DataArray
THe data-array to be resampled, with dimensions x and y.
The data-array to be resampled, with dimensions x and y.
ds : xarray.Dataset
The model dataset.
method : string or rasterio.enums.Resampling, optional
@@ -457,7 +457,7 @@ def structured_da_to_ds(da, ds, method="average", nodata=np.NaN):
method is 'linear' or 'nearest' da.interp() is used. Otherwise
da.rio.reproject_match() is used. The default is "average".
nodata : float, optional
THe nodata value in input and output. THe default is np.NaN.
The nodata value in input and output. The default is np.NaN.

Returns
-------
7 changes: 6 additions & 1 deletion nlmod/dims/time.py
@@ -503,7 +503,9 @@ def dataframe_to_flopy_timeseries(
filename=None,
time_series_namerecord=None,
interpolation_methodrecord="stepwise",
append=False,
):
assert not df.isna().any(axis=None)
if ds is not None:
# set index to days after the start of the simulation
df = df.copy()
@@ -519,7 +521,10 @@

if isinstance(interpolation_methodrecord, str):
interpolation_methodrecord = [interpolation_methodrecord] * len(df.columns)
package.ts.initialize(

# initialize or append a new package
method = package.ts.append_package if append else package.ts.initialize
method(
filename=filename,
timeseries=timeseries,
time_series_namerecord=time_series_namerecord,
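
For context on the new append keyword: it decides whether a flopy time-series sub-package is created (package.ts.initialize) or extended (package.ts.append_package). A hedged usage sketch, not part of this PR: the parameter names df, ds, package and filename are taken from the visible code, while df_stage, df_cond and ghb are hypothetical placeholders that would have to come from an existing model setup.

```python
from nlmod.dims.time import dataframe_to_flopy_timeseries

# df_stage / df_cond: pandas DataFrames indexed by time, one column per series;
# ds: the model dataset; ghb: a flopy boundary package that supports .ts.
# The first call creates the time-series sub-package via package.ts.initialize().
dataframe_to_flopy_timeseries(df_stage, ds=ds, package=ghb, filename="stage.ts")
# A second call with append=True adds to it via package.ts.append_package().
dataframe_to_flopy_timeseries(df_cond, ds=ds, package=ghb, filename="cond.ts", append=True)
```
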
4 changes: 0 additions & 4 deletions nlmod/gwf/gwf.py
@@ -215,10 +215,6 @@ def _disv(ds, model, length_units="METERS", pname="disv", **kwargs):
xorigin = ds.attrs["xorigin"]
yorigin = ds.attrs["yorigin"]
angrot = ds.attrs["angrot"]
elif "extent" in ds.attrs.keys():
xorigin = ds.attrs["extent"][0]
yorigin = ds.attrs["extent"][2]
angrot = 0.0
else:
xorigin = 0.0
yorigin = 0.0
4 changes: 3 additions & 1 deletion nlmod/gwf/surface_water.py
@@ -1024,8 +1024,10 @@ def add_season_timeseries(
package,
summer_months=(4, 5, 6, 7, 8, 9),
filename="season.ts",
seasons=("winter", "summer"),
seasons=None,
Collaborator:

Why set seasons to None? The keyword summer_months sort of suggests winter/summer being the only possible two seasons?

Collaborator (author):

I changed this because a tuple gave a bug: seasons (the time_series_namerecord) somehow needs to be a list, and it is not allowed to use a list as an optional argument. Therefore, when seasons is None, I set it to seasons = ["winter", "summer"]. Maybe it would be better to add two arguments, called winter_name and summer_name?

):
if seasons is None:
seasons = ["winter", "summer"]
tmin = pd.to_datetime(ds.time.start)
if tmin.month in summer_months:
ts_data = [(0.0, 0.0, 1.0)]
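
On the seasons=None discussion above: the underlying issue is the usual Python caveat that a mutable (list) default argument is shared between calls, which is why the default is now created inside the function. A minimal, self-contained illustration of the pattern (not nlmod code):

```python
def get_seasons(seasons=None):
    # Creating the list inside the function gives every call a fresh object,
    # which also satisfies linters that flag mutable default arguments.
    if seasons is None:
        seasons = ["winter", "summer"]
    return seasons

a = get_seasons()
a.append("spring")
print(get_seasons())  # ['winter', 'summer'] - unaffected by the earlier append
```
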
16 changes: 12 additions & 4 deletions nlmod/modpath/modpath.py
@@ -430,7 +430,15 @@ def pg_from_pd(nodes, localx=0.5, localy=0.5, localz=0.5):


def sim(
mpf, particlegroups, direction="backward", gwf=None, ref_time=None, stoptime=None
mpf,
particlegroups,
direction="backward",
gwf=None,
ref_time=None,
stoptime=None,
simulationtype="combined",
weaksinkoption="pass_through",
weaksourceoption="pass_through",
):
"""Create a modpath backward simulation from a particle group.

@@ -472,10 +480,10 @@ def sim(

mpsim = flopy.modpath.Modpath7Sim(
mpf,
simulationtype="combined",
simulationtype=simulationtype,
trackingdirection=direction,
weaksinkoption="pass_through",
weaksourceoption="pass_through",
weaksinkoption=weaksinkoption,
weaksourceoption=weaksourceoption,
referencetime=ref_time,
stoptimeoption=stoptimeoption,
stoptime=stoptime,
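
The newly exposed keywords are passed straight through to flopy.modpath.Modpath7Sim. A hedged usage sketch (mpf and pg are placeholders for objects produced by the other helpers in this module; "pathline" and "stop_at" are standard MODPATH 7 option strings, not something introduced by this PR):

```python
from nlmod.modpath.modpath import sim

# mpf: a Modpath7 model object, pg: particle group(s), both created elsewhere.
mpsim = sim(
    mpf,
    pg,
    direction="backward",
    simulationtype="pathline",   # instead of the previously hard-coded "combined"
    weaksinkoption="stop_at",    # instead of "pass_through"
    weaksourceoption="stop_at",
)
```
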
1 change: 1 addition & 0 deletions nlmod/read/__init__.py
@@ -13,6 +13,7 @@
rws,
waterboard,
webservices,
nhi,
)
from .geotop import get_geotop
from .regis import get_regis
2 changes: 1 addition & 1 deletion nlmod/read/knmi.py
@@ -131,7 +131,7 @@ def _add_ts_to_ds(timeseries, loc_sel, variable, ds):
"""Add a timeseries to a variable at location loc_sel in model DataSet."""
end = pd.Timestamp(ds.time.data[-1])
if timeseries.index[-1] < end:
raise ValueError(f"no recharge available at {timeseries.name} for date {end}")
raise ValueError(f"no data available at {timeseries.name} for date {end}")

# fill recharge data array
model_recharge = pd.Series(index=ds.time, dtype=float)
88 changes: 88 additions & 0 deletions nlmod/read/nhi.py
@@ -0,0 +1,88 @@
import os
import requests
import logging
import numpy as np
import rioxarray
from ..dims.resample import structured_da_to_ds

logger = logging.getLogger(__name__)


def download_file(url, pathname, filename=None, overwrite=False, timeout=120):
if filename is None:
filename = url.split("/")[-1]
fname = os.path.join(pathname, filename)
if overwrite or not os.path.isfile(fname):
logger.info(f"Downloading {filename}")
r = requests.get(url, allow_redirects=True, timeout=timeout)
with open(fname, "wb") as file:
file.write(r.content)
return fname


def download_buisdrainage(pathname, overwrite=False):
url_bas = "https://thredds.data.nhi.nu/thredds/fileServer/opendap/models/nhi3_2/25m"

# download resistance
url = f"{url_bas}/buisdrain_c_ras25/buisdrain_c_ras25.nc"
fname_c = download_file(url, pathname, overwrite=overwrite)

# download drain depth
url = f"{url_bas}/buisdrain_d_ras25/buisdrain_d_ras25.nc"
fname_d = download_file(url, pathname, overwrite=overwrite)

return fname_c, fname_d


def add_buisdrainage(
ds,
pathname=None,
cond_var="buisdrain_cond",
depth_var="buisdrain_depth",
cond_method="average",
depth_method="mode",
):
if pathname is None:
pathname = ds.cachedir
# download files if needed
fname_c, fname_d = download_buisdrainage(pathname)

# make sure the crs of ds is set
if ds.rio.crs is None:
ds = ds.rio.write_crs(28992)
Collaborator:

should we use the EPSG definition in nlmod for this?

Collaborator (author):

Somehow this does not give the same problems as with maps (I believe, but I did not check thoroughly). It might break things if we change this: if other data sources are also in EPSG:28992 they do not need to be transformed, but if we change the CRS of the model Dataset to our own definition, an unwanted transformation is applied. The buisdrainage data is in WGS84, so you may be right. Let's check this in the future.


# use method = "average" on conductance, to account for locations without pipe
# drainage, where the conductance is 0
buisdrain_c = rioxarray.open_rasterio(fname_c, mask_and_scale=True)[0]
# calculate a conductance (per m2) from a resistance
cond = 1 / buisdrain_c
# set conductance to 0 where resistance is infinite or 0
cond = cond.where(~(np.isinf(cond) | np.isnan(cond)), 0.0)
cond = cond.rio.write_crs(buisdrain_c.rio.crs)
# resample to model grid
ds[cond_var] = structured_da_to_ds(cond, ds, method=cond_method)
# multiply by area to get a conductance
ds[cond_var] = ds[cond_var] * ds["area"]

# use method = "mode" on depth, to retrieve the depth that occurs most often in each cell
mask_and_scale = False
buisdrain_d = rioxarray.open_rasterio(fname_d, mask_and_scale=mask_and_scale)[0]
if mask_and_scale:
nodata = np.nan
else:
nodata = buisdrain_d.attrs["_FillValue"]
# set buisdrain_d to nodata where it is 0
mask = buisdrain_d != 0
buisdrain_d = buisdrain_d.where(mask, nodata).rio.write_crs(buisdrain_d.rio.crs)
# resample to model grid
ds[depth_var] = structured_da_to_ds(
buisdrain_d, ds, method=depth_method, nodata=nodata
)
if not mask_and_scale:
# set nodata values to NaN
ds[depth_var] = ds[depth_var].where(ds[depth_var] != nodata)

# from cm to m
ds[depth_var] = ds[depth_var] / 100.0

return ds
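
On the CRS discussion above: add_buisdrainage only writes EPSG:28992 when the model dataset has no CRS yet, so that structured_da_to_ds can match the downloaded rasters to the model grid. A small, self-contained illustration of that rioxarray pattern (not nlmod code; the toy grid values are made up):

```python
import numpy as np
import rioxarray  # noqa: F401  (registers the .rio accessor on xarray objects)
import xarray as xr

# A tiny dataset on an RD-like 25 m grid, without any CRS information.
x = np.arange(100_012.5, 100_112.5, 25.0)
y = np.arange(430_087.5, 429_987.5, -25.0)
ds = xr.Dataset(
    {"area": (("y", "x"), np.full((y.size, x.size), 625.0))},
    coords={"x": x, "y": y},
)

print(ds.rio.crs)             # None: nothing is known about the coordinates yet
ds = ds.rio.write_crs(28992)  # attach EPSG:28992 (RD New), as in add_buisdrainage
print(ds.rio.crs)             # EPSG:28992
```
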
14 changes: 12 additions & 2 deletions tests/test_005_external_data.py
@@ -1,6 +1,7 @@
import test_001_model

import pandas as pd
import nlmod
import pytest


def test_get_recharge():
@@ -13,7 +14,8 @@ def test_get_recharge():

def test_get_reacharge_most_common():
# model with sea
ds = test_001_model.get_ds_from_cache("basic_sea_model")
ds = nlmod.get_ds([100000, 110000, 420000, 430000])
ds = nlmod.time.set_ds_time(ds, start="2021", time=pd.date_range("2022", "2023"))

# add knmi recharge to the model dataset
ds.update(nlmod.read.knmi.get_recharge(ds, most_common_station=True))
@@ -31,6 +33,14 @@ def test_get_recharge_steady_state():
ds.update(nlmod.read.knmi.get_recharge(ds))


def test_get_recharge_not_available():
ds = nlmod.get_ds([100000, 110000, 420000, 430000])
time = pd.date_range("2010", pd.Timestamp.now())
ds = nlmod.time.set_ds_time(ds, start="2000", time=time)
with pytest.raises(ValueError):
ds.update(nlmod.read.knmi.get_recharge(ds))


def test_ahn_within_extent():
extent = [95000.0, 105000.0, 494000.0, 500000.0]
da = nlmod.read.ahn.get_ahn_from_wcs(extent)
22 changes: 22 additions & 0 deletions tests/test_021_nhi.py
@@ -0,0 +1,22 @@
import os
import numpy as np
import tempfile
import nlmod
import pytest

tmpdir = tempfile.gettempdir()


@pytest.mark.slow
def test_buidrainage():
model_ws = os.path.join(tmpdir, "buidrain")
ds = nlmod.get_ds([110_000, 130_000, 435_000, 445_000], model_ws=model_ws)
ds = nlmod.read.nhi.add_buisdrainage(ds)

# assert that all locations with a specified depth also have a positive conductance
mask = ~ds["buisdrain_depth"].isnull()
assert np.all(ds["buisdrain_cond"].data[mask] > 0)

# assert that all locations with a positive conductance also have a specified depth
mask = ds["buisdrain_cond"] > 0
assert np.all(~np.isnan(ds["buisdrain_depth"].data[mask]))