diff --git a/README.md b/README.md index ac6b18e3..337ab2a7 100644 --- a/README.md +++ b/README.md @@ -10,20 +10,12 @@ Python package with functions to process, build and visualise MODFLOW models in the Netherlands. The functions in nlmod have four main objectives: -1. Create and adapt the temporal and spatial discretization of a MODFLOW model using an xarray Dataset. These functions are contained in `nlmod.dims` -2. Read data from external sources, project this data on the modelgrid and add this data to an xarray Dataset. These functions are contained in `nlmod.read` -3. Use data in an xarray Dataset to build modflow packages using flopy. The functions for modflow 6 packages are in `nlmod.gwf`, for modpath in `nlmod.modpath`. -4. Visualise modeldata in Python or GIS software. These functions are contained in `nlmod.plot` and `nlmod.gis`. - -External data sources that can be read are: -- AHN, digital elevation model -- bgt, surface water level geometries -- Geotop, subsurface model -- Jarkus, bathymetry -- KNMI, precipitation and evaporation -- REGIS, subsurface model -- Rijkswaterstaat, surface water polygons -- multiple waterboards, surface water level data +1. Create and adapt the temporal and spatial discretization of a MODFLOW model using an xarray Dataset (`nlmod.dims`). +2. Download and read data from external sources, project this data on the modelgrid and add it to an xarray Dataset (`nlmod.read`). +3. Use data in an xarray Dataset to build MODFLOW packages using FloPy (`nlmod.gwf` for MODFLOW 6 and `nlmod.modpath` for MODPATH). +4. Visualise model data in Python (`nlmod.plot`) or GIS software (`nlmod.gis`). + +More information can be found on the documentation website: https://nlmod.readthedocs.io/. ## Installation diff --git a/docs/examples/07_gridding_vector_data.ipynb b/docs/examples/07_gridding_vector_data.ipynb new file mode 100644 index 00000000..350c2e32 --- /dev/null +++ b/docs/examples/07_gridding_vector_data.ipynb @@ -0,0 +1,571 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\n", + "\n", + "\n", + "# Gridding vector data \n", + "\n", + "Vector data can be points, lines or polygons, often saved as shapefiles and visualised using GIS software. A common operation is to project vector data on a modelgrid, for example to add a surface water line to a grid. In this section we present some functions in `nlmod` to project vector data on a modelgrid and to aggregate vector data to model cells." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import nlmod\n", + "from nlmod import resample\n", + "import numpy as np\n", + "import xarray as xr\n", + "import flopy\n", + "import warnings\n", + "\n", + "\n", + "from matplotlib.colors import Normalize\n", + "from matplotlib.patches import Polygon\n", + "from matplotlib.collections import PatchCollection\n", + "import matplotlib.pyplot as plt\n", + "\n", + "import geopandas as gpd\n", + "from shapely.geometry import LineString, Point\n", + "from shapely.geometry import Polygon as shp_polygon\n", + "from scipy.interpolate import RectBivariateSpline\n", + "\n", + "from IPython.display import display" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Grid types\n", + "\n", + "We create the same two grids as in the 'Resampling raster data' notebook."
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# structured grid\n", + "ds = nlmod.get_ds([950, 1250, 20050, 20350], delr=100)\n", + "# vertex grid\n", + "dsv = nlmod.grid.refine(ds, refinement_features=[([Point(1200, 20200)], 'point', 1)], model_ws='model7')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Vector to grid\n", + "\n", + "Vector data can be points, lines or polygons often saved as shapefiles and visualised using GIS software. A common operation is to project vector data on a modelgrid. For example to add a surface water line to a grid. Here are some functions in `nlmod` to project vector data on a modelgrid." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "point_geom = [Point(x,y) for x, y in zip([1000, 1200, 1225, 1300],[20200, 20175, 20175, 20425])]\n", + "point_gdf = gpd.GeoDataFrame({'values':[1,52,66,24]}, geometry=point_geom)\n", + "line_geom = [LineString([point_geom[0], point_geom[1]]),\n", + " LineString([point_geom[2], point_geom[3]]),\n", + " LineString([point_geom[0], point_geom[3]])]\n", + "line_gdf = gpd.GeoDataFrame({'values':[1,52,66]}, geometry=line_geom)\n", + "pol_geom = [shp_polygon([[p.x, p.y] for p in [point_geom[0], point_geom[1], point_geom[2], point_geom[3]]]),\n", + " shp_polygon([[p.x, p.y] for p in [point_geom[0], point_geom[1], point_geom[2], Point(1200,20300)]])]\n", + "pol_gdf = gpd.GeoDataFrame({'values':[166, 5]}, geometry=pol_geom)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "fig, ax = plt.subplots()\n", + "nlmod.plot.modelgrid_from_ds(ds).plot(ax=ax)\n", + "point_gdf.plot(ax=ax, color='green')\n", + "line_gdf.plot(ax=ax, color='purple')\n", + "pol_gdf.plot(ax=ax, alpha=0.6)\n", + "\n", + "ax.set_xlim(ax.get_xlim()[0], 1400)\n", + "ax.set_ylim(ax.get_ylim()[0], 20500)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Points" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Aggregation methods" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "fig, axes = plt.subplots(ncols=4, figsize=(20,5))\n", + "\n", + "da1 = nlmod.grid.gdf_to_da(point_gdf, ds, 'values', agg_method='min')\n", + "da2 = nlmod.grid.gdf_to_da(point_gdf, ds, 'values', agg_method='max')\n", + "da3 = nlmod.grid.gdf_to_da(point_gdf, ds, 'values', agg_method='mean')\n", + "\n", + "vmin = min(da1.min(), da2.min(), da3.min())\n", + "vmax = max(da1.max(), da2.max(), da3.max())\n", + "\n", + "da1.plot(ax=axes[0], vmin=vmin, vmax=vmax)\n", + "axes[0].set_title('aggregation min')\n", + "axes[0].axis('scaled')\n", + "\n", + "da2.plot(ax=axes[1], vmin=vmin, vmax=vmax)\n", + "axes[1].set_title('aggregation max')\n", + "axes[1].axis('scaled')\n", + "\n", + "da3.plot(ax=axes[2], vmin=vmin, vmax=vmax)\n", + "axes[2].set_title('aggregation mean')\n", + "axes[2].axis('scaled')\n", + "\n", + "point_gdf.plot('values', ax=axes[3], vmin=vmin, vmax=vmax, legend=True)\n", + "nlmod.grid.modelgrid_from_ds(ds).plot(ax=axes[3])\n", + "axes[3].set_title('points')\n", + "axes[3].axis('scaled');" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Interpolation methods" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "fig, axes = plt.subplots(ncols=3, 
figsize=(15,5))\n", + "ds.attrs['model_ws'] = ''\n", + "sim = nlmod.sim.sim(ds)\n", + "gwf = nlmod.gwf.gwf(ds, sim)\n", + "dis = nlmod.gwf.dis(ds, gwf)\n", + "da1 = nlmod.grid.gdf_to_data_array_struc(point_gdf, gwf, field='values', interp_method='nearest')\n", + "da2 = nlmod.grid.gdf_to_data_array_struc(point_gdf, gwf, field='values', interp_method='linear')\n", + "\n", + "vmin = min(da1.min(), da2.min())\n", + "vmax = max(da1.max(), da2.max())\n", + "\n", + "da1.plot(ax=axes[0], vmin=vmin, vmax=vmax)\n", + "axes[0].set_title('interpolation nearest')\n", + "axes[0].axis('scaled')\n", + "\n", + "da2.plot(ax=axes[1], vmin=vmin, vmax=vmax)\n", + "axes[1].set_title('interpolation linear')\n", + "axes[1].axis('scaled')\n", + "\n", + "\n", + "point_gdf.plot('values', ax=axes[2], vmin=vmin, vmax=vmax, legend=True)\n", + "nlmod.grid.modelgrid_from_ds(ds).plot(ax=axes[2])\n", + "axes[2].set_title('points')\n", + "axes[2].axis('scaled')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Lines" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "fig, axes = plt.subplots(ncols=4, figsize=(20,5))\n", + "\n", + "da1 = nlmod.grid.gdf_to_da(line_gdf, ds, 'values', agg_method='max_length')\n", + "da2 = nlmod.grid.gdf_to_da(line_gdf, ds, 'values', agg_method='length_weighted')\n", + "da3 = nlmod.grid.gdf_to_da(line_gdf, ds, 'values', agg_method='mean')\n", + "\n", + "vmin = min(da1.min(), da2.min(), da3.min())\n", + "vmax = max(da1.max(), da2.max(), da3.max())\n", + "\n", + "da1.plot(ax=axes[0], vmin=vmin, vmax=vmax)\n", + "axes[0].set_title('aggregation max_length')\n", + "axes[0].axis('scaled')\n", + "\n", + "da2.plot(ax=axes[1], vmin=vmin, vmax=vmax)\n", + "axes[1].set_title('aggregation length_weighted')\n", + "axes[1].axis('scaled')\n", + "\n", + "da3.plot(ax=axes[2], vmin=vmin, vmax=vmax)\n", + "axes[2].set_title('aggregation mean')\n", + "axes[2].axis('scaled')\n", + "\n", + "line_gdf.plot('values', ax=axes[3], vmin=vmin, vmax=vmax, legend=True)\n", + "nlmod.grid.modelgrid_from_ds(ds).plot(ax=axes[3])\n", + "axes[3].set_title('lines')\n", + "axes[3].axis('scaled')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Polygons" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "fig, axes = plt.subplots(ncols=4, figsize=(20,5))\n", + "\n", + "da1 = nlmod.grid.gdf_to_da(pol_gdf, ds, 'values', agg_method='max_area')\n", + "da2 = nlmod.grid.gdf_to_da(pol_gdf, ds, 'values', agg_method='area_weighted')\n", + "da3 = nlmod.grid.gdf_to_data_array_struc(pol_gdf, gwf, 'values', agg_method='nearest')\n", + "\n", + "vmin = min(da1.min(), da2.min(), da3.min())\n", + "vmax = max(da1.max(), da2.max(), da3.max())\n", + "\n", + "da1.plot(ax=axes[0], vmin=vmin, vmax=vmax)\n", + "axes[0].set_title('aggregation max_area')\n", + "axes[0].axis('scaled')\n", + "\n", + "da2.plot(ax=axes[1], vmin=vmin, vmax=vmax)\n", + "axes[1].set_title('aggregation area_weighted')\n", + "axes[1].axis('scaled')\n", + "\n", + "da3.plot(ax=axes[2], vmin=vmin, vmax=vmax)\n", + "axes[2].set_title('aggregation nearest')\n", + "axes[2].axis('scaled')\n", + "\n", + "pol_gdf.plot('values', ax=axes[3], vmin=vmin, vmax=vmax, legend=True)\n", + "nlmod.grid.modelgrid_from_ds(ds).plot(ax=axes[3])\n", + "axes[3].set_title('polygons')\n", + "axes[3].axis('scaled');" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Intersect vector data with grid" + 
] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "gdf_point_grid = nlmod.grid.gdf_to_grid(point_gdf, ds)\n", + "gdf_line_grid = nlmod.grid.gdf_to_grid(line_gdf, ds)\n", + "gdf_pol_grid = nlmod.grid.gdf_to_grid(pol_gdf, ds)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "fig, ax = plt.subplots()\n", + "\n", + "gdf_point_grid.plot(ax=ax, color='green')\n", + "gdf_line_grid['ind'] = range(gdf_line_grid.shape[0])\n", + "gdf_line_grid.plot('ind', ax=ax, cmap='jet')\n", + "gdf_pol_grid['ind'] = range(gdf_pol_grid.shape[0])\n", + "gdf_pol_grid.plot('ind',ax=ax, alpha=0.6)\n", + "\n", + "nlmod.grid.modelgrid_from_ds(ds).plot(ax=ax)\n", + "ax.set_xlim(ax.get_xlim()[0], 1300)\n", + "ax.set_ylim(ax.get_ylim()[0], 20400);" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Aggregate parameters per model cell\n", + "\n", + "Aggregation options:\n", + "- point: max, min, mean\n", + "- line: max, min, length_weighted, max_length\n", + "- polygon: max, min, area_weighted, max_area\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# point\n", + "display(gdf_point_grid)\n", + "nlmod.grid.aggregate_vector_per_cell(gdf_point_grid,{'values':'max'})" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# line\n", + "display(gdf_line_grid)\n", + "nlmod.grid.aggregate_vector_per_cell(gdf_line_grid,{'values':'length_weighted'})" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# polygon\n", + "display(gdf_pol_grid)\n", + "nlmod.grid.aggregate_vector_per_cell(gdf_pol_grid,{'values':'area_weighted'})" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Grid to reclist\n", + "For some modflow packages (drn, riv, ghb, wel) you need to specify stress_period_data to create them using flopy. This stress_period_data consists of reclists (also called lrcd for a structured grid) for every time step.\n", + "\n", + "The function `da_to_reclist` can be used to convert grid data (both structured and vertex) to a reclist. This function has many arguments:\n", + "- `mask`, boolean DataArray to determine which cells should be added to the reclist. Can be 2d or 3d.\n", + "- `layer`, if `mask` is a 2d array the value of `layer` is used in the reclist. If `mask` is 3d or `first_active_layer` is True the `layer` argument is ignored.\n", + "- `only_active_cells`, if True only add cells with an idomain of 1 to the reclist.\n", + "- `first_active_layer`, if True use the first active layer, obtained from the idomain, as the layer for each cell.\n", + "- `col1`, `col2` and `col3`, the column data of the reclist.\n", + "\n", + "The examples below show the effect of each argument."
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# add layer dimension\n", + "if 'layer' not in ds.dims:\n", + " ds = ds.expand_dims({'layer':range(3)})\n", + "\n", + "# create some data arrays\n", + "ds['da1'] = ('layer', 'y','x'), np.random.randint(0,10,(ds.dims['layer'], ds.dims['y'],ds.dims['x']))\n", + "ds['da2'] = ('y','x'), np.random.randint(0,10,(ds.dims['y'],ds.dims['x']))\n", + "ds['da3'] = ('y','x'), np.random.randint(0,10,(ds.dims['y'],ds.dims['x']))\n", + "\n", + "# add a nodata value\n", + "ds.attrs['nodata'] = -999\n", + "\n", + "# create an idomain of ones except for the first cell which is zero\n", + "idomain = np.ones((ds.dims['layer'], ds.dims['y'],ds.dims['x']))\n", + "idomain[0,0,0] = 0\n", + "ds['idomain'] = ('layer','y','x'), idomain" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Mask and layer\n", + "If `mask` is a 2d array, the `layer` argument specifies the layer that is used in the reclist." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# structured 2d grid to reclist\n", + "mask2d = ds['da2'] == ds['da2'][0,0]\n", + "reclist1 = nlmod.grid.da_to_reclist(ds, mask2d, col1=ds['da1'][0], col2='da2', layer=0, only_active_cells=False)\n", + "reclist1" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "If the `mask` is three dimensional the `layer` argument is ignored." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# create a 3dmask \n", + "mask3d = ds['da1'] == ds['da1'].values[0,0,0]\n", + "\n", + "# use this mask to create the reclist\n", + "reclist2 = nlmod.grid.da_to_reclist(ds, mask3d, col1='da1',col2=100, layer=0, only_active_cells=False)\n", + "reclist2" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Only active cells\n", + "With `only_active_cells=True` we make sure only active cells end up in the reclist. Which cells are active is based on the `idomain` in the model dataset." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Only return the cells with an active idomain \n", + "reclist3 = nlmod.grid.da_to_reclist(ds, mask3d, col1='da1',col2=100, only_active_cells=True)\n", + "reclist3" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# also possible for a 2d grid\n", + "mask2d = ds['da2'] == ds['da2'][0,0]\n", + "reclist1 = nlmod.grid.da_to_reclist(ds, mask2d, col1=ds['da1'][0], col2='da2', layer=0, only_active_cells=True)\n", + "reclist1" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### First active_layer\n", + "Use `first_active_layer=True` to add the first active layer to the reclist. The first active layer is obtained from the idomain." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# create a reclist with col1 (str), col2 (DataArray), col3 (int)\n", + "reclist4 = nlmod.grid.da_to_reclist(ds, mask2d, col1='da2',col2='da3', first_active_layer=True)\n", + "reclist4" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Reclist columns\n", + "The `col1`, `col2` and `col3` arguments specify what data should be listed in the reclist. The types can be `str`,`xarray.DataArray`,`None` or other. 
If the value is a `str` the corresponding DataArray from the Dataset is used to get data for the reclist. If the value is an `xarray.DataArray` the DataArray is used. If the value is `None` the column is not added to the reclist and if the value is from another type the value is used for every record in the reclist.\n", + "\n", + "Be aware that if `mask` is a 3d array, the DataArrays of the column should also be 3d." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# create a reclist with col1 (str), col2 (DataArray), col3 (int)\n", + "reclist5 = nlmod.grid.da_to_reclist(ds, mask3d, col1=ds['idomain'],col2='da1',col3=9, layer=0, only_active_cells=False)\n", + "reclist5" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Vertex model to reclist" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# add some random DataArray to the vertex dataset\n", + "da_vert = np.random.randint(0,10,(dsv['area'].shape))\n", + "dsv['da_vert'] = ('icell2d'), da_vert\n", + "\n", + "# create rec list from a vertex dataset\n", + "mask_vert = dsv['da_vert'] == dsv['da_vert'][0]\n", + "reclist6 = nlmod.grid.da_to_reclist(dsv, mask_vert, col1='da_vert',col2=2330, only_active_cells=False)\n", + "reclist6" + ] + } + ], + "metadata": { + "CodeCell": { + "cm_config": { + "lineWrapping": true + } + }, + "MarkdownCell": { + "cm_config": { + "lineWrapping": true + } + }, + "anaconda-cloud": {}, + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.8" + }, + "vscode": { + "interpreter": { + "hash": "dace5e1b41a98a8e52d2a8eebc3b981caf2c12e7a76736ebfb89a489e3b62e79" + } + }, + "widgets": { + "state": {}, + "version": "1.1.2" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/docs/examples/07_resampling.ipynb b/docs/examples/07_resampling.ipynb index 3c3865a8..589e2eea 100644 --- a/docs/examples/07_resampling.ipynb +++ b/docs/examples/07_resampling.ipynb @@ -7,26 +7,11 @@ "\n", "\n", "\n", - "# Resampling \n", + "# Resampling raster data\n", "\n", "Resampling data is a very common operation when building a Modflow model. Usually it is used to project data from one grid onto the other. There are many different ways to do this. This notebook shows some examples of resampling methods that are incorporated in the `nlmod` package. These methods rely heavily on resampling methods in packages such as `rioxarray` and `scipy.interpolate`." ] }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Contents\n", - "1. [Grid types](#gridtypes)\n", - "2. [Structured grid to fine structured grid](#2)\n", - "3. [Structured grid to locally refined grid](#3)\n", - "4. [Locally refined grid to structured grid](#4)\n", - "5. [Fill nan values](#5)\n", - "6. [Vector to grid](#6)\n", - "7. [Grid to reclist (stress period data)](#7)\n", - "8. [Real world example](#8)" - ] - }, { "cell_type": "code", "execution_count": null, @@ -69,7 +54,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## [1. 
Grid types](#TOC)\n", + "## Grid types\n", "\n", "So far two different gridtypes are supported in `nlmod`:\n", "- structured grids where the cellsize is fixed for all cells\n", @@ -82,7 +67,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "#### structured grid\n", + "### structured grid\n", "\n", "This structured grid has random numbers between 0 and 9. Has 10 x 10 cells." ] @@ -93,23 +78,19 @@ "metadata": {}, "outputs": [], "source": [ - "# structured grid 2d\n", - "x = np.arange(1000, 1300, 100)\n", - "y = np.arange(20300, 20000, -100)\n", - "data_2d = np.random.randint(0, 10, size=(len(y), len(x))).astype(float)\n", - "struc2d = xr.DataArray(data_2d, dims=('y', 'x'),\n", - " coords={'x': x,\n", - " 'y': y})\n", + "ds = nlmod.get_ds([950, 1250, 20050, 20350], delr=100)\n", + "ds['data'] = ('y', 'x'), np.random.rand(len(ds.y), len(ds.x)) * 10\n", + "\n", "fig, ax = plt.subplots()\n", "ax.set_aspect('equal')\n", - "qm = struc2d.plot(ax=ax, lw=0.1, edgecolor='k')" + "ds['data'].plot(ax=ax, lw=0.1, edgecolor='k');" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "##### structured grid with nan value" + "### structured grid with nan value" ] }, { @@ -118,18 +99,19 @@ "metadata": {}, "outputs": [], "source": [ - "struc2d_nan = struc2d.copy().astype(float)\n", - "struc2d_nan.values[0][1] = np.nan\n", + "ds['data_nan'] = ds['data'].copy()\n", + "ds['data_nan'].data[0, 1] = np.NaN\n", + "\n", "fig, ax = plt.subplots()\n", "ax.set_aspect('equal')\n", - "qm = struc2d_nan.plot(ax=ax, lw=0.1, edgecolor='k')" + "ds['data_nan'].plot(ax=ax, lw=0.1, edgecolor='k');" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "#### vertex grid" + "### vertex grid" ] }, { @@ -138,69 +120,19 @@ "metadata": {}, "outputs": [], "source": [ - "dx = 100\n", - "dy = 100\n", - "x = np.arange(1000, 1300, dx)\n", - "y = np.arange(20300, 20000, -dy)\n", - "split_cell_no = 5\n", - "\n", - "# create structured grid\n", - "xv, yv = np.meshgrid(x, y)\n", - "xvc = xv.ravel()\n", - "yvc = yv.ravel()\n", - "#xyi = np.stack((np.ravel(xv), np.ravel(yv)), axis=-1)\n", - "\n", - "# create vertices\n", - "vertices = np.ones((len(xvc), 4, 2))\n", - "for i, x, y in zip(range(len(vertices)),xvc, yvc):\n", - " vertices[i] = np.array([[x-(dx/2), y+(dy/2)],\n", - " [x+(dx/2), y+(dy/2)],\n", - " [x+(dx/2), y-(dy/2)],\n", - " [x-(dx/2), y-(dy/2)]])\n", - " \n", - "# remove refined cell from structured grid\n", - "split_cell_x, split_cell_y = xvc[split_cell_no], yvc[split_cell_no]\n", - "xvc = np.delete(xvc, split_cell_no, 0)\n", - "yvc = np.delete(yvc, split_cell_no, 0)\n", - "vertices = np.delete(vertices, split_cell_no, 0)\n", + "dsv = nlmod.grid.refine(ds, refinement_features=[([Point(1200, 20200)], 'point', 1)], model_ws='model7')\n", + "dsv['data'] = 'icell2d', np.random.rand(len(dsv.data))\n", "\n", - "# get cell centers of refined cell\n", - "x_refined = np.array([split_cell_x-(dx/4), split_cell_x+(dx/4), split_cell_x-(dx/4), split_cell_x+(dx/4)])\n", - "y_refined = np.array([split_cell_y+(dy/4), split_cell_y+(dy/4), split_cell_y-(dy/4), split_cell_y-(dy/4)])\n", - "\n", - "# get vertices of refined cell\n", - "vert_refined = np.ones((len(x_refined), 4, 2))\n", - "for i, x, y in zip(range(len(vert_refined)),x_refined, y_refined):\n", - " vert_refined[i] = np.array([[x-(dx/4), y+(dy/4)],\n", - " [x+(dx/4), y+(dy/4)],\n", - " [x+(dx/4), y-(dy/4)],\n", - " [x-(dx/4), y-(dy/4)]])\n", - "\n", - "# add refined cell to the grid and vertices\n", - "xvc = np.insert(xvc, split_cell_no, x_refined, axis=0)\n", - 
"yvc = np.insert(yvc, split_cell_no, y_refined, axis=0)\n", - "vertices = np.insert(vertices, split_cell_no, vert_refined, axis=0)\n", - "\n", - "# calculate_area\n", - "area_vertex = [(v[:,0].max() - v[:,0].min()) * (v[:,1].max() - v[:,1].min()) for v in vertices]\n", - "\n", - "# get cellid\n", - "icell2d = np.arange(len(xvc))\n", - "\n", - "# create values\n", - "values = np.random.randint(0, 10, size=len(icell2d)).astype(float)\n", - "\n", - "# create vertextured dataarray\n", - "coords = dict(x=xr.DataArray(xvc, dims=['icell2d',]), y=xr.DataArray(yvc, dims=['icell2d',]))\n", - "vertex1 = xr.DataArray(values, dims=('icell2d'), coords=coords)\n", - "nlmod.plot.plot_vertex_array(vertex1, vertices, gridkwargs={'edgecolor': 'k'});" + "fig, ax = plt.subplots()\n", + "ax.set_aspect('equal')\n", + "nlmod.plot.data_array(dsv['data'], ds=dsv, edgecolor='k');" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "#### vertex grid with nan" + "### vertex grid with nan" ] }, { @@ -209,17 +141,19 @@ "metadata": {}, "outputs": [], "source": [ - "vertex1_nan = vertex1.copy().astype(float)\n", - "vertex1_nan.values[7] = np.nan\n", + "dsv['data_nan'] = dsv['data'].copy()\n", + "dsv['data_nan'][7] = np.NaN\n", "\n", - "nlmod.plot.plot_vertex_array(vertex1_nan, vertices, gridkwargs={'edgecolor': 'k'});" + "fig, ax = plt.subplots()\n", + "ax.set_aspect('equal')\n", + "nlmod.plot.data_array(dsv['data_nan'], ds=dsv, edgecolor='k');" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "## [2 Structured grid to fine structured grid](#TOC)" + "## Structured grid to fine structured grid" ] }, { @@ -228,8 +162,8 @@ "metadata": {}, "outputs": [], "source": [ - "# generate a model dataset\n", - "ds = nlmod.get_ds(extent=[950., 1250., 20050., 20350.], delr=50.)" + "# generate a finer model dataset\n", + "ds_fine = nlmod.get_ds(extent=[950., 1250., 20050., 20350.], delr=50.)" ] }, { @@ -262,11 +196,8 @@ "outputs": [], "source": [ "for method in ['nearest', 'linear', 'cubic', 'average', 'min']:\n", - " if method == 'nearest':\n", - " struc_out = resample.structured_da_to_ds(struc2d, ds, method=method)\n", - " else:\n", - " struc_out = resample.structured_da_to_ds(struc2d.astype(float), ds, method=method) \n", - " compare_structured_data_arrays(struc2d, struc_out, method)" + " struc_out = resample.structured_da_to_ds(ds['data'], ds_fine, method=method) \n", + " compare_structured_data_arrays(ds['data'], struc_out, method)" ] }, { @@ -283,8 +214,8 @@ "outputs": [], "source": [ "for method in ['nearest', 'linear', 'cubic', 'average', 'mode']:\n", - " struc_out = resample.structured_da_to_ds(struc2d_nan, ds, method=method)\n", - " compare_structured_data_arrays(struc2d_nan, struc_out, method)" + " struc_out = resample.structured_da_to_ds(ds['data_nan'], ds_fine, method=method)\n", + " compare_structured_data_arrays(ds['data_nan'], struc_out, method)" ] }, { @@ -300,14 +231,14 @@ "metadata": {}, "outputs": [], "source": [ - "interp_spline = RectBivariateSpline(struc2d.x.values, struc2d.y.values[::-1], struc2d.values[::-1], \n", - " ky=min(3,len(struc2d.y)-1), \n", - " kx=min(3,len(struc2d.x)-1))\n", - "arr_out = interp_spline(ds.x, ds.y[::-1], grid=True)[::-1]\n", + "interp_spline = RectBivariateSpline(ds.x.values, ds.y.values[::-1], ds['data'].values[::-1], \n", + " ky=min(3,len(ds.y)-1), \n", + " kx=min(3,len(ds.x)-1))\n", + "arr_out = interp_spline(ds_fine.x, ds_fine.y[::-1], grid=True)[::-1]\n", "struc_out = xr.DataArray(arr_out, dims=('y', 'x'),\n", - " coords={'x': ds.x,\n", - " 'y': ds.y})\n", - 
"compare_structured_data_arrays(struc2d, struc_out, 'Rectangular Bivariate Spline')" + " coords={'x': ds_fine.x,\n", + " 'y': ds_fine.y})\n", + "compare_structured_data_arrays(ds['data'], struc_out, 'Rectangular Bivariate Spline')" ] }, { @@ -323,24 +254,21 @@ "metadata": {}, "outputs": [], "source": [ - "interp_spline = RectBivariateSpline(struc2d_nan.x.values, struc2d_nan.y.values[::-1], struc2d_nan.values[::-1], \n", - " ky=min(3,len(struc2d_nan.y)-1), \n", - " kx=min(3,len(struc2d_nan.x)-1))\n", - "interp_spline = RectBivariateSpline(struc2d_nan.x.values, struc2d_nan.y.values[::-1], struc2d_nan.values[::-1], \n", - " ky=min(3,len(struc2d_nan.y)-1), \n", - " kx=min(3,len(struc2d_nan.x)-1))\n", - "arr_out = interp_spline(ds.x, ds.y[::-1], grid=True)[::-1]\n", + "interp_spline = RectBivariateSpline(ds.x.values, ds.y.values[::-1], ds['data_nan'].values[::-1], \n", + " ky=min(3,len(ds.y)-1), \n", + " kx=min(3,len(ds.x)-1))\n", + "arr_out = interp_spline(ds_fine.x, ds_fine.y[::-1], grid=True)[::-1]\n", "struc_out = xr.DataArray(arr_out, dims=('y', 'x'),\n", - " coords={'x': ds.x,\n", - " 'y': ds.y})\n", - "compare_structured_data_arrays(struc2d_nan, struc_out, 'Rectangular Bivariate Spline')" + " coords={'x': ds_fine.x,\n", + " 'y': ds_fine.y})\n", + "compare_structured_data_arrays(ds['data_nan'], struc_out, 'Rectangular Bivariate Spline')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "## [3. Structured grid to locally refined grid](#TOC)" + "## Structured grid to locally refined grid" ] }, { @@ -349,27 +277,19 @@ "metadata": {}, "outputs": [], "source": [ - "def compare_struct_to_vertex(struc2d, res_vertex2d_n, vertices, method):\n", + "def compare_struct_to_vertex(struc2d, res_vertex2d_n, dsv, method):\n", " fig, axes = plt.subplots(ncols=2, figsize=(12,6))\n", - " struc2d.plot(ax=axes[0], edgecolor='k', vmin=0, vmax=9)\n", + " norm = Normalize(0, 9)\n", + " struc2d.plot(ax=axes[0], edgecolor='k', norm=norm)\n", " axes[0].set_aspect('equal')\n", " axes[0].set_title('structured grid')\n", - " nlmod.plot.plot_vertex_array(res_vertex2d_n, vertices, ax=axes[1], gridkwargs={'edgecolor': 'k'}, vmin=0, vmax=9)\n", + " \n", + " pc = nlmod.plot.data_array(res_vertex2d_n, ds=dsv, ax=axes[1], edgecolor='k', norm=norm)\n", + " plt.colorbar(pc)\n", + " axes[1].set_aspect('equal')\n", " axes[1].set_title(f'locally refined grid, method {method}')" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "data_vars = dict(area=(['icell2d'], area_vertex))\n", - "coords = dict(x=xr.DataArray(xvc, dims=['icell2d',]), y=xr.DataArray(yvc, dims=['icell2d',]))\n", - "attrs = dict(gridtype='vertex', extent=[950, 1250, 20050, 20350])\n", - "dsv = xr.Dataset(data_vars=data_vars, coords=coords, attrs=attrs)" - ] - }, { "cell_type": "markdown", "metadata": {}, @@ -384,15 +304,15 @@ "outputs": [], "source": [ "for method in ['nearest', 'linear', 'cubic']:\n", - " res_vertex2d_n = resample.structured_da_to_ds(struc2d, dsv, method=method)\n", - " compare_struct_to_vertex(struc2d, res_vertex2d_n, vertices, method)" + " res_vertex2d_n = resample.structured_da_to_ds(ds['data'], dsv, method=method)\n", + " compare_struct_to_vertex(ds['data'], res_vertex2d_n, dsv, method)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "## [4. 
Locally refined grid to structured grid](#TOC)" + "## Locally refined grid to structured grid" ] }, { @@ -401,11 +321,14 @@ "metadata": {}, "outputs": [], "source": [ - "def compare_vertex_to_struct(vertex1, struc_out_n, method):\n", + "def compare_vertex_to_struct(vertex1, dsv, struc_out_n, method):\n", " fig, axes = plt.subplots(ncols=2, figsize=(12,6))\n", - " nlmod.plot.plot_vertex_array(vertex1, vertices, ax=axes[0], gridkwargs={'edgecolor': 'k'}, vmin=0, vmax=9)\n", + " norm = Normalize(0, 9)\n", + " pc = nlmod.plot.data_array(vertex1, ds=dsv, ax=axes[0], edgecolor='k', norm=norm)\n", + " plt.colorbar(pc)\n", " axes[0].set_title('original')\n", - " struc_out_n.plot(ax=axes[1], edgecolor='k', vmin=0, vmax=9)\n", + " axes[0].set_aspect('equal')\n", + " struc_out_n.plot(ax=axes[1], edgecolor='k', norm=norm)\n", " axes[1].set_title(f'resampled, method {method}')\n", " axes[1].set_aspect('equal')" ] @@ -424,8 +347,8 @@ "outputs": [], "source": [ "for method in ['nearest', 'linear', 'cubic']:\n", - " struc_out_n = resample.vertex_da_to_ds(vertex1, ds=ds, method=method)\n", - " compare_vertex_to_struct(vertex1, struc_out_n, method)" + " struc_out_n = resample.vertex_da_to_ds(dsv['data'], ds=ds, method=method)\n", + " compare_vertex_to_struct(dsv['data'], dsv, struc_out_n, method)" ] }, { @@ -442,15 +365,15 @@ "outputs": [], "source": [ "for method in ['nearest', 'linear', 'cubic']:\n", - " struc_out_n = resample.vertex_da_to_ds(vertex1_nan, ds=ds, method=method)\n", - " compare_vertex_to_struct(vertex1_nan, struc_out_n, method)" + " struc_out_n = resample.vertex_da_to_ds(dsv['data_nan'], ds=ds, method=method)\n", + " compare_vertex_to_struct(dsv['data_nan'], dsv, struc_out_n, method)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "## [5. 
Fill nan values](#TOC)" + "## Fill nan values" ] }, { @@ -467,15 +390,15 @@ "outputs": [], "source": [ "for method in ['nearest', 'linear']:\n", - " struc2d_nan_filled = resample.fillnan_da_structured_grid(struc2d_nan, method=method)\n", - " compare_structured_data_arrays(struc2d_nan, struc2d_nan_filled, method)" + " struc2d_nan_filled = resample.fillnan_da_structured_grid(ds['data_nan'], method=method)\n", + " compare_structured_data_arrays(ds['data_nan'], struc2d_nan_filled, method)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "## vertex grid" + "### vertex grid" ] }, { @@ -484,11 +407,15 @@ "metadata": {}, "outputs": [], "source": [ - "def compare_vertex_arrays(vertex1, vertex2, method):\n", + "def compare_vertex_arrays(vertex1, vertex2, dsv, method):\n", " fig, axes = plt.subplots(ncols=2, figsize=(12,6))\n", - " nlmod.plot.plot_vertex_array(vertex1, vertices, ax=axes[0], gridkwargs={'edgecolor': 'k'}, vmin=0, vmax=9)\n", + " norm = Normalize(0, 9)\n", + " pc = nlmod.plot.data_array(vertex1, ds=dsv, ax=axes[0], edgecolor='k', norm=norm)\n", + " plt.colorbar(pc)\n", " axes[0].set_title('original')\n", - " nlmod.plot.plot_vertex_array(vertex2, vertices, ax=axes[1], gridkwargs={'edgecolor': 'k'}, vmin=0, vmax=9)\n", + " axes[0].set_aspect('equal')\n", + " pc = nlmod.plot.data_array(vertex2, ds=dsv, ax=axes[1], edgecolor='k', norm=norm)\n", + " plt.colorbar(pc)\n", " axes[1].set_title(f'resampled, method {method}')\n", " axes[1].set_aspect('equal')" ] @@ -500,494 +427,15 @@ "outputs": [], "source": [ "for method in ['nearest', 'linear']:\n", - " vertex1_nan_filled = resample.fillnan_da_vertex_grid(vertex1_nan, x=xvc, y=yvc, method=method)\n", - " compare_vertex_arrays(vertex1_nan, vertex1_nan_filled, method)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## [6. Vector to grid](#TOC)\n", - "\n", - "Vector data can be points, lines or polygons often saved as shapefiles and visualised using GIS software. A common operation is to project vector data on a modelgrid. For example to add a surface water line to a grid. Here are some functions in `nlmod` to project vector data on a modelgrid." 
- ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Vector data:\n", - " - point\n", - " - line\n", - " - polygon\n", - "\n", - "Vector data -> vector data per cell -> aggregeren per cell -> in het grid zetten" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "point_geom = [Point(x,y) for x, y in zip([1000, 1200, 1225, 1300],[20200, 20175, 20175, 20425])]\n", - "point_gdf = gpd.GeoDataFrame({'values':[1,52,66,24]}, geometry=point_geom)\n", - "line_geom = [LineString([point_geom[0], point_geom[1]]),\n", - " LineString([point_geom[2], point_geom[3]]),\n", - " LineString([point_geom[0], point_geom[3]])]\n", - "line_gdf = gpd.GeoDataFrame({'values':[1,52,66]}, geometry=line_geom)\n", - "pol_geom = [shp_polygon([[p.x, p.y] for p in [point_geom[0], point_geom[1], point_geom[2], point_geom[3]]]),\n", - " shp_polygon([[p.x, p.y] for p in [point_geom[0], point_geom[1], point_geom[2], Point(1200,20300)]])]\n", - "pol_gdf = gpd.GeoDataFrame({'values':[166, 5]}, geometry=pol_geom)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "fig, ax = plt.subplots()\n", - "struc2d.plot(ax=ax, lw=0.1, edgecolor='k', alpha=0.5)\n", - "point_gdf.plot(ax=ax, color='green')\n", - "line_gdf.plot(ax=ax, color='purple')\n", - "pol_gdf.plot(ax=ax, alpha=0.6)\n", - "\n", - "ax.set_xlim(ax.get_xlim()[0], 1400)\n", - "ax.set_ylim(ax.get_ylim()[0], 20500)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Points" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Aggregation methods" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "fig, axes = plt.subplots(ncols=4, figsize=(20,5))\n", - "\n", - "da1 = nlmod.grid.gdf_to_da(point_gdf, ds, 'values', agg_method='max')\n", - "da2 = nlmod.grid.gdf_to_da(point_gdf, ds, 'values', agg_method='mean')\n", - "da3 = nlmod.grid.gdf_to_da(point_gdf, ds, 'values', agg_method='nearest')\n", - "\n", - "vmin = min(da1.min(), da2.min(), da3.min())\n", - "vmax = max(da1.max(), da2.max(), da3.max())\n", - "\n", - "da1.plot(ax=axes[0], vmin=vmin, vmax=vmax)\n", - "axes[0].set_title('aggregation max')\n", - "axes[0].axis('scaled')\n", - "\n", - "\n", - "da2.plot(ax=axes[1], vmin=vmin, vmax=vmax)\n", - "axes[1].set_title('aggregation mean')\n", - "axes[1].axis('scaled')\n", - "\n", - "da3.plot(ax=axes[2], vmin=vmin, vmax=vmax)\n", - "axes[2].set_title('aggregation nearest')\n", - "axes[2].axis('scaled')\n", - "\n", - "point_gdf.plot('values', ax=axes[3], vmin=vmin, vmax=vmax, legend=True)\n", - "nlmod.grid.modelgrid_from_ds(ds).plot(ax=axes[3])\n", - "axes[3].set_title('points')\n", - "axes[3].axis('scaled')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Interpolation methods" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "fig, axes = plt.subplots(ncols=3, figsize=(15,5))\n", - "ds.attrs['model_ws'] = ''\n", - "sim = nlmod.sim.sim(ds)\n", - "gwf = nlmod.gwf.gwf(ds, sim)\n", - "dis = nlmod.gwf.dis(ds, gwf)\n", - "da1 = nlmod.grid.gdf_to_data_array_struc(point_gdf, gwf, field='values', interp_method='nearest')\n", - "da2 = nlmod.grid.gdf_to_data_array_struc(point_gdf, gwf, field='values', interp_method='linear')\n", - "\n", - "vmin = min(da1.min(), da2.min())\n", - "vmax = max(da1.max(), da2.max())\n", - "\n", - 
"da1.plot(ax=axes[0], vmin=vmin, vmax=vmax)\n", - "axes[0].set_title('interpolation nearest')\n", - "axes[0].axis('scaled')\n", - "\n", - "da2.plot(ax=axes[1], vmin=vmin, vmax=vmax)\n", - "axes[1].set_title('interpolation linear')\n", - "axes[1].axis('scaled')\n", - "\n", - "\n", - "point_gdf.plot('values', ax=axes[2], vmin=vmin, vmax=vmax, legend=True)\n", - "nlmod.grid.modelgrid_from_ds(ds).plot(ax=axes[2])\n", - "axes[2].set_title('points')\n", - "axes[2].axis('scaled')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Lines" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "fig, axes = plt.subplots(ncols=4, figsize=(20,5))\n", - "\n", - "da1 = nlmod.grid.gdf_to_da(line_gdf, ds, 'values', agg_method='max_length')\n", - "da2 = nlmod.grid.gdf_to_da(line_gdf, ds, 'values', agg_method='length_weighted')\n", - "da3 = nlmod.grid.gdf_to_da(line_gdf, ds, 'values', agg_method='nearest')\n", - "\n", - "vmin = min(da1.min(), da2.min(), da3.min())\n", - "vmax = max(da1.max(), da2.max(), da3.max())\n", - "\n", - "da1.plot(ax=axes[0], vmin=vmin, vmax=vmax)\n", - "axes[0].set_title('aggregation max_length')\n", - "axes[0].axis('scaled')\n", - "\n", - "da2.plot(ax=axes[1], vmin=vmin, vmax=vmax)\n", - "axes[1].set_title('aggregation length_weighted')\n", - "axes[1].axis('scaled')\n", - "\n", - "da3.plot(ax=axes[2], vmin=vmin, vmax=vmax)\n", - "axes[2].set_title('aggregation nearest')\n", - "axes[2].axis('scaled')\n", - "\n", - "line_gdf.plot('values', ax=axes[3], vmin=vmin, vmax=vmax, legend=True)\n", - "nlmod.grid.modelgrid_from_ds(ds).plot(ax=axes[3])\n", - "axes[3].set_title('lines')\n", - "axes[3].axis('scaled')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Polygons" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "fig, axes = plt.subplots(ncols=4, figsize=(20,5))\n", - "\n", - "da1 = nlmod.grid.gdf_to_da(pol_gdf, ds, 'values', agg_method='max_area')\n", - "da2 = nlmod.grid.gdf_to_da(pol_gdf, ds, 'values', agg_method='area_weighted')\n", - "da3 = nlmod.grid.gdf_to_data_array_struc(pol_gdf, gwf, 'values', agg_method='nearest')\n", - "\n", - "vmin = min(da1.min(), da2.min(), da3.min())\n", - "vmax = max(da1.max(), da2.max(), da3.max())\n", - "\n", - "da1.plot(ax=axes[0], vmin=vmin, vmax=vmax)\n", - "axes[0].set_title('aggregation max_area')\n", - "axes[0].axis('scaled')\n", - "\n", - "da2.plot(ax=axes[1], vmin=vmin, vmax=vmax)\n", - "axes[1].set_title('aggregation area_weighted')\n", - "axes[1].axis('scaled')\n", - "\n", - "da3.plot(ax=axes[2], vmin=vmin, vmax=vmax)\n", - "axes[2].set_title('aggregation nearest')\n", - "axes[2].axis('scaled')\n", - "\n", - "pol_gdf.plot('values', ax=axes[3], vmin=vmin, vmax=vmax, legend=True)\n", - "nlmod.grid.modelgrid_from_ds(ds).plot(ax=axes[3])\n", - "axes[3].set_title('polygons')\n", - "axes[3].axis('scaled');" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Intersect vector data with grid" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "gdf_point_grid = nlmod.grid.gdf_to_grid(point_gdf, ds)\n", - "gdf_line_grid = nlmod.grid.gdf_to_grid(line_gdf, ds)\n", - "gdf_pol_grid = nlmod.grid.gdf_to_grid(pol_gdf, ds)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "fig, ax = plt.subplots()\n", - "\n", - 
"gdf_point_grid.plot(ax=ax, color='green')\n", - "gdf_line_grid['ind'] = range(gdf_line_grid.shape[0])\n", - "gdf_line_grid.plot('ind', ax=ax, cmap='jet')\n", - "gdf_pol_grid['ind'] = range(gdf_pol_grid.shape[0])\n", - "gdf_pol_grid.plot('ind',ax=ax, alpha=0.6)\n", - "\n", - "nlmod.grid.modelgrid_from_ds(ds).plot(ax=ax)\n", - "ax.set_xlim(ax.get_xlim()[0], 1300)\n", - "ax.set_ylim(ax.get_ylim()[0], 20400)" + " vertex1_nan_filled = resample.fillnan_da_vertex_grid(dsv['data_nan'], ds=dsv, method=method)\n", + " compare_vertex_arrays(dsv['data_nan'], vertex1_nan_filled, dsv, method)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "### Aggregate parameters per model cell\n", - "\n", - "Aggregatie options:\n", - "- point: max, min, mean\n", - "- line: max, min, length_weighted, max_length\n", - "- polygon: max, min, area_weighted, area_max\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# point\n", - "display(gdf_point_grid)\n", - "nlmod.grid.aggregate_vector_per_cell(gdf_point_grid,{'values':'max'})" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# line\n", - "display(gdf_line_grid)\n", - "nlmod.grid.aggregate_vector_per_cell(gdf_line_grid,{'values':'length_weighted'})" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# polygon\n", - "display(gdf_pol_grid)\n", - "nlmod.grid.aggregate_vector_per_cell(gdf_pol_grid,{'values':'area_weighted'})" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## [7. Grid to reclist](#TOC)\n", - "For some modflow packages (drn, riv, ghb, wel) you need to specify stress_period_data to create them using flopy. This stress_period_data consists of reclists (also called lrcd for a structured grid) for every time step. \n", - "\n", - "The function `da_to_reclist` can be used to convert grid data (both structured and vertex) to a reclist. This function has many arguments:\n", - "- `mask`, boolean DataArray to determine which cells should be added to the reclist. Can be 2d or 3d.\n", - "- `layer`, if `mask` is a 2d array the value of `layer` is used in the reclist. If `mask` is 3d or `first_active_layer` is True the `layer` argument is ignored.\n", - "- `only_active_cells`, if True only add cells with an idomain of 1 to the reclist\n", - "- `first_active_layer`, if True use the first active layer, obtained from the idomain, as the layer for each cell.\n", - "- `col1`,`col2` and `col3`, The column data of the reclist.\n", - "\n", - "The examples below show the result of each argument." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# add layer dimension\n", - "if 'layer' not in ds.dims:\n", - " ds = ds.expand_dims({'layer':range(3)})\n", - "\n", - "# create some data arrays\n", - "ds['da1'] = ('layer', 'y','x'), np.random.randint(0,10,(ds.dims['layer'], ds.dims['y'],ds.dims['x']))\n", - "ds['da2'] = ('y','x'), np.random.randint(0,10,(ds.dims['y'],ds.dims['x']))\n", - "ds['da3'] = ('y','x'), np.random.randint(0,10,(ds.dims['y'],ds.dims['x']))\n", - "\n", - "# add a nodata value\n", - "ds.attrs['nodata'] = -999\n", - "\n", - "# create an idomain of ones except for the first cell which is zero\n", - "idomain = np.ones((ds.dims['layer'], ds.dims['y'],ds.dims['x']))\n", - "idomain[0,0,0] = 0\n", - "ds['idomain'] = ('layer','y','x'), idomain" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Mask and layer\n", - "If `mask` is a 2d array, the `layer` argument specifies the layer that is used in the reclist." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# structured 2d grid to reclist\n", - "mask2d = ds['da2'] == ds['da2'][0,0]\n", - "reclist1 = nlmod.grid.da_to_reclist(ds, mask2d, col1=ds['da1'][0], col2='da2', layer=0, only_active_cells=False)\n", - "reclist1" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "If the `mask` is three dimensional the `layer` argument is ignored." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# create a 3dmask \n", - "mask3d = ds['da1'] == ds['da1'].values[0,0,0]\n", - "\n", - "# use this mask to create the reclist\n", - "reclist2 = nlmod.grid.da_to_reclist(ds, mask3d, col1='da1',col2=100, layer=0, only_active_cells=False)\n", - "reclist2" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Only active cells\n", - "With `only_active_cells=True` we make sure only active cells end up in the reclist. Which cells are active is based on the `idomain` in the model dataset." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Only return the cells with an active idomain \n", - "reclist3 = nlmod.grid.da_to_reclist(ds, mask3d, col1='da1',col2=100, only_active_cells=True)\n", - "reclist3" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# also possible for a 2d grid\n", - "mask2d = ds['da2'] == ds['da2'][0,0]\n", - "reclist1 = nlmod.grid.da_to_reclist(ds, mask2d, col1=ds['da1'][0], col2='da2', layer=0, only_active_cells=True)\n", - "reclist1" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### First active_layer\n", - "Use `first_active_layer=True` to add the first active layer to the reclist. The first active layer is obtained from the idomain." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# create a reclist with col1 (str), col2 (DataArray), col3 (int)\n", - "reclist4 = nlmod.grid.da_to_reclist(ds, mask2d, col1='da2',col2='da3', first_active_layer=True)\n", - "reclist4" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Reclist columns\n", - "The `col1`, `col2` and `col3` arguments specify what data should be listed in the reclist. The types can be `str`,`xarray.DataArray`,`None` or other. 
If the value is a `str` the corresponding DataArray from the Dataset is used to get data for the reclist. If the value is an `xarray.DataArray` the DataArray is used. If the value is `None` the column is not added to the reclist and if the value is from another type the value is used for every record in the reclist.\n", - "\n", - "Be aware that if `mask` is a 3d array, the DataArrays of the column should also be 3d." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# create a reclist with col1 (str), col2 (DataArray), col3 (int)\n", - "reclist5 = nlmod.grid.da_to_reclist(ds, mask3d, col1=ds['idomain'],col2='da1',col3=9, layer=0, only_active_cells=False)\n", - "reclist5" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Vertex model to reclist" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# add some random DataArray to the vertex dataset\n", - "da_vert = np.random.randint(0,10,(dsv['area'].shape))\n", - "dsv['da_vert'] = ('icell2d'), da_vert\n", - "\n", - "# create rec list from a vertex dataset\n", - "mask_vert = dsv['da_vert'] == dsv['da_vert'][0]\n", - "reclist6 = nlmod.grid.da_to_reclist(dsv, mask_vert, col1='da_vert',col2=2330, only_active_cells=False)\n", - "reclist6" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## [8. Real world example](#TOC)\n", + "## Real world example\n", "In this example we will resample the values of the dutch Digital Terrain Model (DTM) from AHN4 to a structured grid and a vertex grid, using several methods. First we will download the AHN-information." ] }, @@ -1065,7 +513,7 @@ "outputs": [], "source": [ "norm = Normalize(ahn.min(), ahn.max())\n", - "for method in ['nearest', 'linear', 'average', 'min', 'max']:\n", + "for method in ['nearest', 'average', 'min', 'max']:\n", " ahn_res = nlmod.resample.structured_da_to_ds(ahn, dsv, method=method)\n", " \n", " fig, axes = nlmod.plot.get_map(extent, ncols=2, figsize=(12,6))\n", @@ -1107,7 +555,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.7" + "version": "3.10.8" }, "vscode": { "interpreter": { diff --git a/docs/examples/generate_logo.py b/docs/examples/generate_logo.py index da00566c..87cc9e57 100644 --- a/docs/examples/generate_logo.py +++ b/docs/examples/generate_logo.py @@ -14,6 +14,7 @@ n = 2 dx = 10000 # dx = 20000 +figsize = 5 nederland["geometry"] = nederland.simplify(1000) @@ -27,8 +28,8 @@ else: raise (Exception(f"Unsupported dx: {dx}")) -f, ax = nlmod.plot.get_map(extent, figsize=5, base=50000) -nederland.plot(ax=ax, color="k") +# f, ax = nlmod.plot.get_map(extent, figsize=figsize, base=50000) +# nederland.plot(ax=ax, color="k") # %% generate a model dataset ds = nlmod.get_ds(extent, dx) @@ -36,7 +37,7 @@ ds = nlmod.grid.refine(ds, "logo", [(nederland, n)]) # %% plot this -f, ax = nlmod.plot.get_map(extent, figsize=5, base=50000) +f, ax = nlmod.plot.get_map(extent, figsize=figsize, base=50000) nlmod.plot.data_array(ds["area"] * np.NaN, ds, edgecolor="k", clip_on=False) # nlmod.plot.modelgrid(ds, ax=ax) # modelgrid = nlmod.grid.modelgrid_from_ds(ds) @@ -44,8 +45,12 @@ # ax.axis(extent) ax.axis("off") +# ax.text(50000, 550000, "nlmod", fontsize=30, ha="center", va="center") + fname = f"logo_{dx}_{n}" if filled: fname = f"{fname}_filled" +if figsize != 5: + fname = f"{fname}_{figsize}" f.savefig(os.path.join("..", "_static", f"{fname}.png"), 
bbox_inches="tight") f.savefig(os.path.join("..", "_static", f"{fname}.svg"), bbox_inches="tight") diff --git a/docs/getting_started.rst b/docs/getting_started.rst index 8a8aa93a..5cb420f3 100644 --- a/docs/getting_started.rst +++ b/docs/getting_started.rst @@ -34,6 +34,7 @@ from the hydrogeological subsurface model REGIS:: regis_ds = nlmod.read.regis.get_regis(extent) # download REGIS data These methods are accessible through ``nlmod.read``. Supported data sources include: + * REGIS, hydrogeological model (raster) * GeoTOP, geological model for the shallow subsurface (raster) * AHN, the digital elevation model (raster) diff --git a/docs/index.rst b/docs/index.rst index fb52c096..274aa18b 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,6 +1,10 @@ nlmod ===== +.. image:: _static/logo_10000_2.png + :width: 400 + :alt: The logo of nlmod + nlmod is a package with functions to pre-process, build and visualize MODFLOW models. The main focus is on building models in the Netherlands using publicly available datasets, though lots of functionality could be applied to any diff --git a/docs/requirements.txt b/docs/requirements.txt index bb82611a..07cb5605 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,8 +1,7 @@ -sphinx_rtd_theme +sphinx_rtd_theme==1.0.0 Ipython ipykernel nbsphinx -nbsphinx_link netCDF4==1.5.7 rasterstats geocube diff --git a/nlmod/dims/grid.py b/nlmod/dims/grid.py index 696db49e..52638c45 100644 --- a/nlmod/dims/grid.py +++ b/nlmod/dims/grid.py @@ -426,14 +426,15 @@ def update_ds_from_layer_ds(ds, layer_ds, method="nearest", **kwargs): Parameters ---------- - ds : TYPE - DESCRIPTION. - layer_ds : TYPE - DESCRIPTION. + ds : xarray.Dataset + dataset with model data. Can have dimension (layer, y, x) or + (layer, icell2d). + layer_ds : xarray.Dataset + dataset with layer data. method : str - THe method used for resampling layer_ds to the grid of ds - **kwargs : TYPE - DESCRIPTION. + The method used for resampling layer_ds to the grid of ds. + **kwargs : keyword arguments + keyword arguments are passed to the fill_nan_top_botm_kh_kv-method. 
Returns ------- @@ -1331,9 +1332,9 @@ def gdf_to_grid( shpn = shp.copy() shpn["cellid"] = r["cellids"][i] shpn[geometry] = r["ixshapes"][i] - if shp[geometry].type == "LineString": + if shp[geometry].geom_type == "LineString": shpn["length"] = r["lengths"][i] - elif shp[geometry].type == "Polygon": + elif shp[geometry].geom_type == "Polygon": shpn["area"] = r["areas"][i] shps.append(shpn) return gpd.GeoDataFrame(shps, geometry=geometry) @@ -1542,8 +1543,8 @@ def mask_model_edge(ds, idomain): ) elif ds.gridtype == "vertex": - if 'vertices' not in ds: - ds['vertices'] = get_vertices(ds) + if "vertices" not in ds: + ds["vertices"] = get_vertices(ds) polygons_grid = gis.polygons_from_model_ds(ds) gdf_grid = gpd.GeoDataFrame(geometry=polygons_grid) extent_edge = util.polygon_from_extent(ds.extent).exterior diff --git a/nlmod/dims/resample.py b/nlmod/dims/resample.py index d5c305dc..b9c82c22 100644 --- a/nlmod/dims/resample.py +++ b/nlmod/dims/resample.py @@ -395,7 +395,7 @@ def vertex_da_to_ds(da, ds, method="nearest"): return da points = np.array((da.x.data, da.y.data)).T - if ds.gridtype == "vertex": + if "gridtype" in ds.attrs and ds.gridtype == "vertex": if len(da.dims) == 1: xi = list(zip(ds.x.values, ds.y.values)) z = griddata(points, da.values, xi, method=method) diff --git a/nlmod/plot.py b/nlmod/plot.py index cdfc0dc2..57bd2e7f 100644 --- a/nlmod/plot.py +++ b/nlmod/plot.py @@ -483,7 +483,7 @@ def get_map( figsize=figsize, nrows=nrows, ncols=ncols, sharex=sharex, sharey=sharey ) if isinstance(background, bool) and background is True: - background = "OpenStreetMap.Mapnik" + background = "nlmaps.standaard" def set_ax_in_map(ax): ax.axis("scaled")