From 47d3de2408aae49cfc1053eedee6511618a54a5e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Dav=C3=ADd=20Brakenhoff?=
Date: Fri, 25 Aug 2023 10:45:36 +0200
Subject: [PATCH 1/3] codacy suggestions

---
 nlmod/dims/base.py         | 12 ++++++------
 nlmod/dims/grid.py         | 10 +++++++---
 nlmod/dims/layers.py       |  4 ++--
 nlmod/dims/resample.py     | 12 ++++++------
 nlmod/gwf/output.py        |  6 +++++-
 nlmod/gwf/recharge.py      |  2 +-
 nlmod/gwf/surface_water.py | 12 ++++++------
 nlmod/plot/dcs.py          |  2 +-
 nlmod/plot/flopy.py        |  1 -
 nlmod/plot/plot.py         |  2 +-
 nlmod/plot/plotutil.py     |  4 ++--
 nlmod/read/__init__.py     |  2 +-
 nlmod/read/ahn.py          |  4 ++--
 nlmod/read/bgt.py          | 10 +++++-----
 nlmod/read/boundaries.py   |  5 ++---
 nlmod/read/brp.py          |  2 +-
 nlmod/read/geotop.py       |  2 +-
 nlmod/read/jarkus.py       |  4 ++--
 nlmod/read/knmi.py         |  4 ++--
 nlmod/read/meteobase.py    | 10 +++++-----
 nlmod/read/regis.py        |  2 +-
 nlmod/read/rws.py          | 12 ++++++------
 nlmod/read/waterboard.py   |  6 +++---
 nlmod/read/webservices.py  | 21 +++++++++++----------
 nlmod/sim/sim.py           |  2 +-
 nlmod/util.py              |  1 -
 26 files changed, 80 insertions(+), 74 deletions(-)

diff --git a/nlmod/dims/base.py b/nlmod/dims/base.py
index 14441fda..4bbe6793 100644
--- a/nlmod/dims/base.py
+++ b/nlmod/dims/base.py
@@ -207,7 +207,7 @@ def extrapolate_ds(ds, mask=None):
         # all of the model cells are is inside the known area
         return ds
     if mask.all():
-        raise (Exception("The model only contains NaNs"))
+        raise (ValueError("The model only contains NaNs"))
     if "gridtype" in ds.attrs and ds.gridtype == "vertex":
         x = ds.x.data
         y = ds.y.data
@@ -452,10 +452,10 @@ def _get_vertex_grid_ds(
     coords = {"layer": layers, "y": y, "x": x}
     dims = ("layer", "icell2d")
     ds = xr.Dataset(
-        data_vars=dict(
-            top=(dims[1:], top),
-            botm=(dims, botm),
-        ),
+        data_vars={
+            "top": (dims[1:], top),
+            "botm": (dims, botm),
+        },
         coords=coords,
         attrs=attrs,
     )
@@ -605,7 +605,7 @@ def get_ds(
     resample._set_angrot_attributes(extent, xorigin, yorigin, angrot, attrs)
     x, y = resample.get_xy_mid_structured(attrs["extent"], delr, delc)
-    coords = dict(x=x, y=y, layer=layer)
+    coords = {"x": x, "y": y, "layer": layer}
     if angrot != 0.0:
         affine = resample.get_affine_mod_to_world(attrs)
         xc, yc = affine * np.meshgrid(x, y)
diff --git a/nlmod/dims/grid.py b/nlmod/dims/grid.py
index 4fb993e3..0e89ef12 100644
--- a/nlmod/dims/grid.py
+++ b/nlmod/dims/grid.py
@@ -384,7 +384,9 @@ def refine(
     ds_has_rotation = "angrot" in ds.attrs and ds.attrs["angrot"] != 0.0
     if model_coordinates:
         if not ds_has_rotation:
-            raise (Exception("The supplied shapes need to be in realworld coordinates"))
+            raise (
+                ValueError("The supplied shapes need to be in realworld coordinates")
+            )
     elif ds_has_rotation:
         affine_matrix = get_affine_world_to_mod(ds).to_shapely()
@@ -395,7 +397,9 @@ def refine(
             fname, geom_type, level = refinement_feature
             if not model_coordinates and ds_has_rotation:
                 raise (
-                    Exception("Converting files to model coordinates not supported")
+                    NotImplementedError(
+                        "Converting files to model coordinates not supported"
+                    )
                 )
             g.add_refinement_features(fname, geom_type, level, layers=[0])
         elif len(refinement_feature) == 2:
@@ -1419,7 +1423,7 @@ def gdf_to_grid(
         The GeoDataFrame with the geometries per grid-cell.
""" if ml is None and ix is None: - raise (Exception("Either specify ml or ix")) + raise (ValueError("Either specify ml or ix")) if ml is not None: if isinstance(ml, xr.Dataset): diff --git a/nlmod/dims/layers.py b/nlmod/dims/layers.py index c050c586..f9522af2 100644 --- a/nlmod/dims/layers.py +++ b/nlmod/dims/layers.py @@ -40,7 +40,7 @@ def calculate_thickness(ds, top="top", bot="botm"): if ds[top].shape[-1] == ds[bot].shape[-1]: # top is only top of first layer thickness = xr.zeros_like(ds[bot]) - for lay in range(len(thickness)): + for lay, _ in enumerate(thickness): if lay == 0: thickness[lay] = ds[top] - ds[bot][lay] else: @@ -242,7 +242,7 @@ def split_layers_ds( layers_org = layers.copy() # add extra layers (keep the original ones for now, as we will copy data first) for lay0 in split_dict: - for i in range(len(split_dict[lay0])): + for i, _ in enumerate(split_dict[lay0]): index = layers.index(lay0) layers.insert(index, lay0 + "_" + str(i + 1)) layers_org.insert(index, lay0) diff --git a/nlmod/dims/resample.py b/nlmod/dims/resample.py index 7e339a8f..34e0a87a 100644 --- a/nlmod/dims/resample.py +++ b/nlmod/dims/resample.py @@ -208,13 +208,13 @@ def _set_angrot_attributes(extent, xorigin, yorigin, angrot, attrs): extent[0] = 0.0 extent[1] = extent[1] - xorigin elif extent[0] != 0.0: - raise (Exception("Either extent[0] or xorigin needs to be 0.0")) + raise (ValueError("Either extent[0] or xorigin needs to be 0.0")) if yorigin == 0.0: yorigin = extent[2] extent[2] = 0.0 extent[3] = extent[3] - yorigin elif extent[2] != 0.0: - raise (Exception("Either extent[2] or yorigin needs to be 0.0")) + raise (ValueError("Either extent[2] or yorigin needs to be 0.0")) attrs["extent"] = extent attrs["xorigin"] = xorigin attrs["yorigin"] = yorigin @@ -441,7 +441,7 @@ def dim_to_regular_dim(da, dims, z): # just use griddata z = griddata(points, da.data, xi, method=method) dims = ["y", "x"] - coords = dict(x=ds.x, y=ds.y) + coords = {"x": ds.x, "y": ds.y} return xr.DataArray(z, dims=dims, coords=coords) @@ -486,7 +486,7 @@ def structured_da_to_ds(da, ds, method="average", nodata=np.NaN): if hasattr(rasterio.enums.Resampling, method): resampling = getattr(rasterio.enums.Resampling, method) else: - raise (Exception(f"Unknown resample method: {method}")) + raise ValueError(f"Unknown resample method: {method}") # fill crs if it is None for da or ds if ds.rio.crs is None and da.rio.crs is None: logger.info("No crs in da and ds. 
Assuming ds and da are both in EPSG:28992") @@ -533,11 +533,11 @@ def structured_da_to_ds(da, ds, method="average", nodata=np.NaN): da_temp = da_temp.assign_coords(x=x, y=y) mask = ds["area"] == area - da_out.loc[dict(icell2d=mask)] = da_temp.sel( + da_out.loc[{"icell2d": mask}] = da_temp.sel( y=ds["y"][mask], x=ds["x"][mask] ) else: - raise (Exception(f"Gridtype {ds.gridtype} not supported")) + raise (NotImplementedError(f"Gridtype {ds.gridtype} not supported")) # some stuff is added by the reproject_match function that should not be there added_coords = set(da_out.coords) - set(ds.coords) diff --git a/nlmod/gwf/output.py b/nlmod/gwf/output.py index 047240e3..5b271b03 100644 --- a/nlmod/gwf/output.py +++ b/nlmod/gwf/output.py @@ -305,7 +305,11 @@ def get_head_at_point(head, x, y, ds=None, gi=None, drop_nan_layers=True): if "icell2d" in head.dims: if gi is None: if ds is None: - raise (Exception("Please supply either gi or ds for a vertex grid")) + raise ( + ValueError( + "Please supply either gi (GridIntersect) or ds for a vertex grid" + ) + ) gi = flopy.utils.GridIntersect(modelgrid_from_ds(ds), method="vertex") icelld2 = gi.intersect(Point(x, y))["cellids"][0] head_point = head[:, :, icelld2] diff --git a/nlmod/gwf/recharge.py b/nlmod/gwf/recharge.py index 3e544dd4..706f0fc7 100644 --- a/nlmod/gwf/recharge.py +++ b/nlmod/gwf/recharge.py @@ -119,7 +119,7 @@ def ds_to_evt(gwf, ds, pname="evt", nseg=1, surface=None, depth=None, **kwargs): """ assert nseg == 1, "More than one evaporation segment not yet supported" if "surf_rate_specified" in kwargs: - raise (Exception("surf_rate_specified not yet supported")) + raise (NotImplementedError("surf_rate_specified not yet supported")) if surface is None: logger.info("Setting evaporation surface to 1 meter below top") surface = ds["top"] - 1.0 diff --git a/nlmod/gwf/surface_water.py b/nlmod/gwf/surface_water.py index ba3828e6..4180714a 100644 --- a/nlmod/gwf/surface_water.py +++ b/nlmod/gwf/surface_water.py @@ -1,5 +1,6 @@ import logging import warnings +from functools import partial import flopy import numpy as np @@ -461,12 +462,12 @@ def build_spd( mask = (stage > botm_cell) & (idomain_cell > 0) if not mask.any(): raise ( - Exception("rbot and stage are below the bottom of the model") + ValueError("rbot and stage are below the bottom of the model") ) lays = [np.where(mask)[0][0]] conds = [cond] else: - raise (Exception(f"Method {layer_method} unknown")) + raise (ValueError(f"Method {layer_method} unknown")) auxlist = [] if "aux" in row: @@ -519,7 +520,7 @@ def add_info_to_gdf( measure = overlap.length else: msg = f"Unsupported geometry type: {geom_type}" - raise (Exception(msg)) + raise TypeError(msg) if np.any(measure.sum() > min_total_overlap * measure_org): # take the largest @@ -806,7 +807,7 @@ def get_gdf(ds=None, extent=None, fname_ahn=None, ahn=None, buffer=0.0): """ if extent is None: if ds is None: - raise (Exception("Please supply either ds or extent to get_gdf")) + raise (ValueError("Please supply either ds or extent to get_gdf")) extent = get_extent_polygon(ds) gdf = bgt.get_bgt(extent) if fname_ahn is not None: @@ -851,7 +852,6 @@ def add_min_ahn_to_gdf(gdf, ahn, buffer=0.0, column="ahn_min"): A GeoDataFrame with surface water features, with an added column containing the minimum surface level height near the features. 
""" - from functools import partial from geocube.api.core import make_geocube from geocube.rasterize import rasterize_image @@ -1001,7 +1001,7 @@ def gdf_to_seasonal_pkg( elif pkg == "GHB": cl = flopy.mf6.ModflowGwfghb else: - raise (Exception(f"Unknown package: {pkg}")) + raise (ValueError(f"Unknown package: {pkg}")) package = cl( gwf, stress_period_data={0: spd}, diff --git a/nlmod/plot/dcs.py b/nlmod/plot/dcs.py index c53ce427..64e6ac77 100644 --- a/nlmod/plot/dcs.py +++ b/nlmod/plot/dcs.py @@ -6,8 +6,8 @@ import xarray as xr from matplotlib.collections import LineCollection, PatchCollection from matplotlib.patches import Rectangle -from shapely.geometry import LineString, MultiLineString, Point, Polygon from shapely.affinity import affine_transform +from shapely.geometry import LineString, MultiLineString, Point, Polygon from ..dims.grid import modelgrid_from_ds from ..dims.resample import get_affine_world_to_mod diff --git a/nlmod/plot/flopy.py b/nlmod/plot/flopy.py index b3934734..29547739 100644 --- a/nlmod/plot/flopy.py +++ b/nlmod/plot/flopy.py @@ -1,4 +1,3 @@ -import os from functools import partial import flopy diff --git a/nlmod/plot/plot.py b/nlmod/plot/plot.py index 86121e41..f4c48662 100644 --- a/nlmod/plot/plot.py +++ b/nlmod/plot/plot.py @@ -178,7 +178,7 @@ def data_array(da, ds=None, ax=None, rotated=False, edgecolor=None, **kwargs): ax = plt.gca() if "icell2d" in da.dims: if ds is None: - raise (Exception("Supply model dataset (ds) for grid information")) + raise (ValueError("Supply model dataset (ds) for grid information")) if isinstance(ds, list): patches = ds else: diff --git a/nlmod/plot/plotutil.py b/nlmod/plot/plotutil.py index 6485dc7d..3a8a2cb1 100644 --- a/nlmod/plot/plotutil.py +++ b/nlmod/plot/plotutil.py @@ -214,7 +214,7 @@ def colorbar_inside( cax.yaxis.tick_left() cax.yaxis.set_label_position("left") if isinstance(bbox_labels, bool) and bbox_labels is True: - bbox_labels = dict(facecolor="w", alpha=0.5) + bbox_labels = {"facecolor": "w", "alpha": 0.5} if isinstance(bbox_labels, dict): for label in cb.ax.yaxis.get_ticklabels(): label.set_bbox(bbox_labels) @@ -237,7 +237,7 @@ def title_inside( ax = plt.gca() if isinstance(bbox, bool): if bbox: - bbox = dict(facecolor="w", alpha=0.5) + bbox = {"facecolor": "w", "alpha": 0.5} else: bbox = None return ax.text( diff --git a/nlmod/read/__init__.py b/nlmod/read/__init__.py index 4b9ed651..0a9e9729 100644 --- a/nlmod/read/__init__.py +++ b/nlmod/read/__init__.py @@ -1,6 +1,7 @@ from . 
import ( ahn, bgt, + boundaries, brp, geotop, jarkus, @@ -11,7 +12,6 @@ rws, waterboard, webservices, - boundaries, ) from .geotop import get_geotop from .regis import get_regis diff --git a/nlmod/read/ahn.py b/nlmod/read/ahn.py index bb41bdb5..166ddd95 100644 --- a/nlmod/read/ahn.py +++ b/nlmod/read/ahn.py @@ -54,7 +54,7 @@ def get_ahn(ds=None, identifier="AHN4_DTM_5m", method="average", extent=None): elif version == 4: ahn_ds_raw = get_ahn4(extent, identifier=identifier) else: - raise (Exception(f"Unknown ahn-version: {version}")) + raise (ValueError(f"Unknown ahn-version: {version}")) ahn_ds_raw = ahn_ds_raw.drop_vars("band") @@ -98,7 +98,7 @@ def _infer_url(identifier=None): if "ahn3" in identifier: url = "https://service.pdok.nl/rws/ahn3/wcs/v1_0?service=wcs" else: - ValueError(f"unknown identifier -> {identifier}") + raise ValueError(f"unknown identifier -> {identifier}") return url diff --git a/nlmod/read/bgt.py b/nlmod/read/bgt.py index ec6d3801..a5902a89 100644 --- a/nlmod/read/bgt.py +++ b/nlmod/read/bgt.py @@ -1,6 +1,6 @@ import json import time -import xml.etree.ElementTree as ET +import xml.etree.ElementTree as ElementTree from io import BytesIO from zipfile import ZipFile @@ -246,7 +246,7 @@ def read_label(child, d): d["label_plaatsingspunt"] = Point(xy) d["label_hoek"] = float(positie.find(f"{ns}hoek").text) - tree = ET.parse(fname) + tree = ElementTree.parse(fname) ns = "{http://www.opengis.net/citygml/2.0}" data = [] for com in tree.findall(f".//{ns}cityObjectMember"): @@ -286,7 +286,7 @@ def read_label(child, d): elif child[0].tag == f"{ns}Point": d[key] = Point(read_point(child[0])) else: - raise (Exception((f"Unsupported tag: {child[0].tag}"))) + raise (ValueError((f"Unsupported tag: {child[0].tag}"))) elif key == "nummeraanduidingreeks": ns = "{http://www.geostandaarden.nl/imgeo/2.1}" nar = child.find(f"{ns}Nummeraanduidingreeks").find( @@ -301,11 +301,11 @@ def read_label(child, d): elif child[0].tag == f"{ns}Curve": d[key] = LineString(read_curve(child[0])) else: - raise (Exception((f"Unsupported tag: {child[0].tag}"))) + raise (ValueError((f"Unsupported tag: {child[0].tag}"))) elif key == "openbareRuimteNaam": read_label(child, d) else: - raise (Exception((f"Unknown key: {key}"))) + raise (KeyError((f"Unknown key: {key}"))) data.append(d) if len(data) > 0: if geometry is None: diff --git a/nlmod/read/boundaries.py b/nlmod/read/boundaries.py index cd759986..a7c36c46 100644 --- a/nlmod/read/boundaries.py +++ b/nlmod/read/boundaries.py @@ -1,5 +1,4 @@ -from . import webservices -from . import waterboard +from . 
import waterboard, webservices def get_municipalities(source="cbs", drop_water=True, **kwargs): @@ -43,7 +42,7 @@ def get_provinces(source="cbs", **kwargs): gdf = webservices.wfs(url, layer, **kwargs) gdf = gdf.set_index("statnaam") else: - raise (Exception(f"Unknown source: {source}")) + raise (ValueError(f"Unknown source: {source}")) return gdf diff --git a/nlmod/read/brp.py b/nlmod/read/brp.py index 8628836c..7824df4e 100644 --- a/nlmod/read/brp.py +++ b/nlmod/read/brp.py @@ -10,7 +10,7 @@ def get_percelen(extent, year=None): gdf = gdf.set_index("fuuid") else: if year < 2009 or year > 2021: - raise (Exception("Only data available from 2009 up to and including 2021")) + raise (ValueError("Only data available from 2009 up to and including 2021")) url = f"https://services.arcgis.com/nSZVuSZjHpEZZbRo/ArcGIS/rest/services/BRP_{year}/FeatureServer" gdf = webservices.arcrest(url, 0, extent=extent) return gdf diff --git a/nlmod/read/geotop.py b/nlmod/read/geotop.py index 3b9f183b..315d68e0 100644 --- a/nlmod/read/geotop.py +++ b/nlmod/read/geotop.py @@ -36,7 +36,7 @@ def get_lithok_colors(): 8: (216, 163, 32), 9: (95, 95, 255), } - colors = {key: tuple([x / 255 for x in colors[key]]) for key in colors} + colors = {key: tuple([x / 255 for x in color]) for key, color in colors.items()} return colors diff --git a/nlmod/read/jarkus.py b/nlmod/read/jarkus.py index 341e5dac..50ccc996 100644 --- a/nlmod/read/jarkus.py +++ b/nlmod/read/jarkus.py @@ -18,7 +18,7 @@ import xarray as xr from .. import cache -from ..dims.resample import fillnan_da, structured_da_to_ds, get_extent +from ..dims.resample import fillnan_da, get_extent, structured_da_to_ds from ..util import get_da_from_da_ds, get_ds_empty logger = logging.getLogger(__name__) @@ -190,7 +190,7 @@ def get_jarkus_tilenames(extent, kind="jarkus"): elif kind == "vaklodingen": url = "http://opendap.deltares.nl/thredds/dodsC/opendap/rijkswaterstaat/vaklodingen/catalog.nc" else: - raise (Exception(f"Unsupported kind: {kind}")) + raise (ValueError(f"Unsupported kind: {kind}")) ds_jarkus_catalog = xr.open_dataset(url) ew_x = ds_jarkus_catalog["projectionCoverage_x"].values diff --git a/nlmod/read/knmi.py b/nlmod/read/knmi.py index 630b7001..14a1d817 100644 --- a/nlmod/read/knmi.py +++ b/nlmod/read/knmi.py @@ -116,7 +116,7 @@ def get_recharge(ds, method="linear", most_common_station=False): loc_sel = locations.loc[(locations["stn_ev24"] == stn)] _add_ts_to_ds(ts, loc_sel, "evaporation", ds_out) else: - raise (Exception(f"Unknown method: {method}")) + raise (ValueError(f"Unknown method: {method}")) for datavar in ds_out: ds_out[datavar].attrs["source"] = "KNMI" ds_out[datavar].attrs["date"] = dt.datetime.now().strftime("%Y%m%d") @@ -145,7 +145,7 @@ def _add_ts_to_ds(timeseries, loc_sel, variable, ds): # there will be NaN's, which we fill by backfill model_recharge = model_recharge.fillna(method="bfill") if model_recharge.isna().any(): - raise (Exception("There are NaN-values in {variable}")) + raise (ValueError(f"There are NaN-values in {variable}.")) # add data to ds values = np.repeat(model_recharge.values[:, np.newaxis], loc_sel.shape[0], 1) diff --git a/nlmod/read/meteobase.py b/nlmod/read/meteobase.py index cf202e6e..a5df07db 100644 --- a/nlmod/read/meteobase.py +++ b/nlmod/read/meteobase.py @@ -231,11 +231,11 @@ def read_meteobase_ascii( da = DataArray( data_array, dims=["time", "y", "x"], - coords=dict( - time=times, - x=x, - y=y, - ), + coords={ + "time": times, + "x": x, + "y": y, + }, attrs=meta, name=foldername, ) diff --git a/nlmod/read/regis.py 
b/nlmod/read/regis.py index 6314c53c..0e1431d8 100644 --- a/nlmod/read/regis.py +++ b/nlmod/read/regis.py @@ -139,7 +139,7 @@ def get_regis( if len(ds.x) == 0 or len(ds.y) == 0: msg = "No data found. Please supply valid extent in the Netherlands in RD-coordinates" - raise (Exception(msg)) + raise (ValueError(msg)) # make sure layer names are regular strings ds["layer"] = ds["layer"].astype(str) diff --git a/nlmod/read/rws.py b/nlmod/read/rws.py index c6e61bcf..350b087f 100644 --- a/nlmod/read/rws.py +++ b/nlmod/read/rws.py @@ -5,8 +5,8 @@ import logging import os -import numpy as np import geopandas as gpd +import numpy as np import xarray as xr import nlmod @@ -212,11 +212,11 @@ def calculate_sea_coverage( When true, dtm-values are connected diagonally as well (to determine the level the sea will reach). The default is False. method : str, optional - The method used to scale the dtm to ds. The default is "mode" (mode means that - if more than half of the (not-nan) cells are wet, the cell is classified as + The method used to scale the dtm to ds. The default is "mode" (mode means that + if more than half of the (not-nan) cells are wet, the cell is classified as sea). nodata : int or float, optional - The value for model cells outside the coverage of the dtm. + The value for model cells outside the coverage of the dtm. Only used internally. The default is -1. return_filled_dtm : bool, optional When True, return the filled dtm. The default is False. @@ -242,8 +242,8 @@ def calculate_sea_coverage( # determine the closest x and y in the dtm grid x_sea = dtm.x.sel(x=xy_sea[0], method="nearest") y_sea = dtm.y.sel(y=xy_sea[1], method="nearest") - dtm.loc[dict(x=x_sea, y=y_sea)] = dtm.min() - seed.loc[dict(x=x_sea, y=y_sea)] = dtm.min() + dtm.loc[{"x": x_sea, "y": y_sea}] = dtm.min() + seed.loc[{"x": x_sea, "y": y_sea}] = dtm.min() seed = seed.data footprint = np.ones((3, 3), dtype="bool") diff --git a/nlmod/read/waterboard.py b/nlmod/read/waterboard.py index fc09c7b4..3434cb76 100644 --- a/nlmod/read/waterboard.py +++ b/nlmod/read/waterboard.py @@ -522,9 +522,9 @@ def get_data(wb, data_kind, extent=None, max_record_count=None, config=None, **k f = "geojson" if wb not in config: - raise (Exception(f"No configuration available for {wb}")) + raise (ValueError(f"No configuration available for {wb}")) if data_kind not in config[wb]: - raise (Exception(f"{data_kind} not available for {wb}")) + raise (ValueError(f"{data_kind} not available for {wb}")) conf = config[wb][data_kind] url = conf["url"] if "layer" in conf: @@ -551,7 +551,7 @@ def get_data(wb, data_kind, extent=None, max_record_count=None, config=None, **k url, layer, extent, max_record_count=max_record_count, **kwargs ) else: - raise (Exception("Unknown server-kind: {server_kind}")) + raise (ValueError(f"Unknown server-kind: {server_kind}")) if len(gdf) == 0: return gdf diff --git a/nlmod/read/webservices.py b/nlmod/read/webservices.py index 02d75c88..a24ff228 100644 --- a/nlmod/read/webservices.py +++ b/nlmod/read/webservices.py @@ -10,6 +10,7 @@ from owslib.wcs import WebCoverageService from rasterio import merge from rasterio.io import MemoryFile +from requests.exceptions import HTTPError from shapely.geometry import MultiPolygon, Point, Polygon from tqdm import tqdm @@ -84,7 +85,7 @@ def arcrest( for feature in features: if "rings" in feature["geometry"]: if len(feature["geometry"]) > 1: - raise (Exception("Not supported yet")) + raise (NotImplementedError("Multiple rings not supported yet")) if len(feature["geometry"]["rings"]) == 1: 
                 geometry = Polygon(feature["geometry"]["rings"][0])
             else:
@@ -127,7 +128,7 @@ def arcrest(
 def _get_data(url, params, timeout=120, **kwargs):
     r = requests.get(url, params=params, timeout=timeout, **kwargs)
     if not r.ok:
-        raise (Exception(f"Request not successful: {r.url}"))
+        raise (HTTPError(f"Request not successful: {r.url}"))
     data = r.json()
     if "error" in data:
         code = data["error"]["code"]
@@ -147,7 +148,7 @@ def wfs(
     timeout=120,
 ):
     """Download data from a wfs server."""
-    params = dict(version=version, request="GetFeature")
+    params = {"version": version, "request": "GetFeature"}
     if version == "2.0.0":
         params["typeNames"] = layer
     else:
@@ -159,7 +160,7 @@ def wfs(
         # get the maximum number of features
         r = requests.get(f"{url}&request=getcapabilities", timeout=120)
         if not r.ok:
-            raise (Exception(f"Request not successful: {r.url}"))
+            raise (HTTPError(f"Request not successful: {r.url}"))
         root = ET.fromstring(r.text)
         ns = {"ows": "http://www.opengis.net/ows/1.1"}
@@ -200,7 +201,7 @@ def add_constrains(elem, constraints):
             params["resultType"] = "hits"
             r = requests.get(url, params=params, timeout=timeout)
             if not r.ok:
-                raise (Exception(f"Request not successful: {r.url}"))
+                raise (HTTPError(f"Request not successful: {r.url}"))
             params.pop("resultType")
             root = ET.fromstring(r.text)
             if "ExceptionReport" in root.tag:
@@ -220,14 +221,14 @@ def add_constrains(elem, constraints):
                 params["startindex"] = ip * max_record_count
                 r = requests.get(url, params=params, timeout=timeout)
                 if not r.ok:
-                    raise (Exception(f"Request not successful: {r.url}"))
+                    raise (HTTPError(f"Request not successful: {r.url}"))
                 gdfs.append(gpd.read_file(BytesIO(r.content), driver=driver))
             gdf = pd.concat(gdfs).reset_index(drop=True)
         else:
             # download all features in one go
             r = requests.get(url, params=params, timeout=timeout)
             if not r.ok:
-                raise (Exception(f"Request not successful: {r.url}"))
+                raise (HTTPError(f"Request not successful: {r.url}"))
             gdf = gpd.read_file(BytesIO(r.content), driver=driver)
     return gdf
@@ -426,7 +427,7 @@ def _download_wcs(extent, res, url, identifier, version, fmt, crs):
     if identifier is None:
         identifiers = list(wcs.contents)
         if len(identifiers) > 1:
-            raise (Exception("wcs contains more than 1 identifier. Please specify."))
+            raise (ValueError("wcs contains more than 1 identifier. Please specify."))
         identifier = identifiers[0]
     if version == "1.0.0":
         bbox = (extent[0], extent[2], extent[1], extent[3])
@@ -445,9 +446,9 @@ def _download_wcs(extent, res, url, identifier, version, fmt, crs):
             identifier=[identifier], subsets=subsets, format=fmt, crs=crs
         )
     else:
-        raise Exception(f"Version {version} not yet supported")
+        raise NotImplementedError(f"Version {version} not yet supported")
     if "xml" in output.info()["Content-Type"]:
         root = ET.fromstring(output.read())
-        raise (Exception("Download failed: {}".format(root[0].text)))
+        raise (Exception(f"Download failed: {root[0].text}"))
     memfile = MemoryFile(output.read())
     return memfile
diff --git a/nlmod/sim/sim.py b/nlmod/sim/sim.py
index f1d13027..17fe91f5 100644
--- a/nlmod/sim/sim.py
+++ b/nlmod/sim/sim.py
@@ -36,7 +36,7 @@ def write_and_run(sim, ds, write_ds=True, script_path=None, silent=False):
         if True the model dataset is cached to a NetCDF-file (.nc) with a name equal
         to its attribute called "model_name". The default is True.
     script_path : str or None, optional
-        full path of the Jupyter Notebook (.ipynb) or the module (.py) with the 
+        full path of the Jupyter Notebook (.ipynb) or the module (.py) with the
         modelscript. The default is None. Preferably this path does not have to be
         given manually but there is currently no good option to obtain the filename
         of a Jupyter Notebook from within the notebook itself.
diff --git a/nlmod/util.py b/nlmod/util.py
index 1d95711f..763c6c97 100644
--- a/nlmod/util.py
+++ b/nlmod/util.py
@@ -82,7 +82,6 @@ def get_exe_path(exe_name="mf6"):
 
     return exe_path
 
 
-
 def get_ds_empty(ds, keep_coords=None):
     """Get a copy of a dataset with only coordinate information.

From c9ad9171f8afabb0f1c0cd92b32889a4c91cc9a4 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Dav=C3=ADd=20Brakenhoff?=
Date: Fri, 25 Aug 2023 11:01:44 +0200
Subject: [PATCH 2/3] codacy fix

---
 nlmod/read/webservices.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nlmod/read/webservices.py b/nlmod/read/webservices.py
index a24ff228..b24beced 100644
--- a/nlmod/read/webservices.py
+++ b/nlmod/read/webservices.py
@@ -449,6 +449,6 @@ def _download_wcs(extent, res, url, identifier, version, fmt, crs):
         raise NotImplementedError(f"Version {version} not yet supported")
     if "xml" in output.info()["Content-Type"]:
         root = ET.fromstring(output.read())
-        raise (Exception(f"Download failed: {root[0].text}"))
+        raise (HTTPError(f"Download failed: {root[0].text}"))
     memfile = MemoryFile(output.read())
     return memfile

From b25a2094077d7735a1064277cc468994049f46f3 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Dav=C3=ADd=20Brakenhoff?=
Date: Fri, 25 Aug 2023 11:02:20 +0200
Subject: [PATCH 3/3] codacy fix

---
 nlmod/read/bgt.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nlmod/read/bgt.py b/nlmod/read/bgt.py
index a5902a89..db7b2ef9 100644
--- a/nlmod/read/bgt.py
+++ b/nlmod/read/bgt.py
@@ -1,7 +1,7 @@
 import json
 import time
-import xml.etree.ElementTree as ElementTree
 from io import BytesIO
+from xml.etree import ElementTree
 from zipfile import ZipFile
 
 import geopandas as gpd