Allow non-unique and non-monotonic coordinates in get_clean_interp_index and polyfit (#4099)

* Allow non-unique and non-monotonic coordinates in get_clean_interp_index and polyfit

* black on missing.py

* Apply change to polyval, add PR to whats-new

* Add tests for get_clean_interp_index return values
aulemahal authored Jun 5, 2020
1 parent 93b2d04 commit 09df5ca
Showing 5 changed files with 26 additions and 9 deletions.
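
As a quick illustration of the behavior this commit enables (variable names and values below are hypothetical, not taken from the diff), fitting along a coordinate that is not sorted no longer raises:

import numpy as np
import xarray as xr

# Hypothetical data whose "x" coordinate is not monotonically increasing.
x = np.array([0.0, 2.0, 1.0, 3.0])
da = xr.DataArray(2.0 * x + 1.0, dims=("x",), coords={"x": x})

# Before this change, DataArray.polyfit raised
# "ValueError: Index 'x' must be monotonically increasing".
fit = da.polyfit(dim="x", deg=1)
print(fit.polyfit_coefficients.values)  # approximately [2., 1.]
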
2 changes: 1 addition & 1 deletion doc/whats-new.rst
@@ -50,7 +50,7 @@ New Features
   By `Andrew Williams <https://github.com/AndrewWilliams3142>`_
 - Added :py:func:`xarray.cov` and :py:func:`xarray.corr` (:issue:`3784`, :pull:`3550`, :pull:`4089`).
   By `Andrew Williams <https://github.com/AndrewWilliams3142>`_ and `Robin Beer <https://github.com/r-beer>`_.
-- Added :py:meth:`DataArray.polyfit` and :py:func:`xarray.polyval` for fitting polynomials. (:issue:`3349`)
+- Added :py:meth:`DataArray.polyfit` and :py:func:`xarray.polyval` for fitting polynomials. (:issue:`3349`, :pull:`3733`, :pull:`4099`)
   By `Pascal Bourgault <https://github.com/aulemahal>`_.
 - Control over attributes of result in :py:func:`merge`, :py:func:`concat`,
   :py:func:`combine_by_coords` and :py:func:`combine_nested` using
2 changes: 1 addition & 1 deletion xarray/core/computation.py
@@ -1506,7 +1506,7 @@ def polyval(coord, coeffs, degree_dim="degree"):
     from .dataarray import DataArray
     from .missing import get_clean_interp_index
 
-    x = get_clean_interp_index(coord, coord.name)
+    x = get_clean_interp_index(coord, coord.name, strict=False)
 
     deg_coord = coeffs[degree_dim]
 
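With strict=False passed here, xr.polyval can evaluate fitted coefficients back on the same unsorted coordinate that produced them. A minimal sketch with hypothetical data, assuming the polyfit output name polyfit_coefficients:

import numpy as np
import xarray as xr

# Hypothetical round trip: fit a quadratic, then evaluate it back on the
# original, unsorted coordinate without any reordering.
x = np.array([0.0, 2.0, 1.0])
da = xr.DataArray(x ** 2, dims=("x",), coords={"x": x})

coeffs = da.polyfit(dim="x", deg=2).polyfit_coefficients
evaluated = xr.polyval(da.x, coeffs)  # values follow the order of da.x
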
2 changes: 1 addition & 1 deletion xarray/core/dataset.py
@@ -5839,7 +5839,7 @@ def polyfit(
         variables = {}
         skipna_da = skipna
 
-        x = get_clean_interp_index(self, dim)
+        x = get_clean_interp_index(self, dim, strict=False)
         xname = "{}_".format(self[dim].name)
         order = int(deg) + 1
         lhs = np.vander(x, order)
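The same relaxation applies to duplicate values along the fit dimension. A hedged sketch with made-up data; the output variable name follows the <var>_polyfit_coefficients convention:

import xarray as xr

# Hypothetical Dataset with a repeated value in the "x" coordinate.
ds = xr.Dataset(
    {"y": ("x", [1.0, 3.0, 3.0, 7.0])},
    coords={"x": [0.0, 1.0, 1.0, 3.0]},
)

# Previously raised "ValueError: Index 'x' has duplicate values"; now the
# repeated x simply contributes an extra sample to the least-squares fit.
fit = ds.polyfit(dim="x", deg=1)
print(fit.y_polyfit_coefficients.values)  # approximately [2., 1.]
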
17 changes: 11 additions & 6 deletions xarray/core/missing.py
@@ -208,7 +208,9 @@ def _apply_over_vars_with_dim(func, self, dim=None, **kwargs):
     return ds
 
 
-def get_clean_interp_index(arr, dim: Hashable, use_coordinate: Union[str, bool] = True):
+def get_clean_interp_index(
+    arr, dim: Hashable, use_coordinate: Union[str, bool] = True, strict: bool = True
+):
     """Return index to use for x values in interpolation or curve fitting.
 
     Parameters
@@ -221,6 +223,8 @@ def get_clean_interp_index(arr, dim: Hashable, use_coordinate: Union[str, bool]
         If use_coordinate is True, the coordinate that shares the name of the
         dimension along which interpolation is being performed will be used as the
         x values. If False, the x values are set as an equally spaced sequence.
+    strict : bool
+        Whether to raise errors if the index is either non-unique or non-monotonic (default).
 
     Returns
     -------
@@ -257,11 +261,12 @@ def get_clean_interp_index(arr, dim: Hashable, use_coordinate: Union[str, bool]
     if isinstance(index, pd.MultiIndex):
         index.name = dim
 
-    if not index.is_monotonic:
-        raise ValueError(f"Index {index.name!r} must be monotonically increasing")
+    if strict:
+        if not index.is_monotonic:
+            raise ValueError(f"Index {index.name!r} must be monotonically increasing")
 
-    if not index.is_unique:
-        raise ValueError(f"Index {index.name!r} has duplicate values")
+        if not index.is_unique:
+            raise ValueError(f"Index {index.name!r} has duplicate values")
 
     # Special case for non-standard calendar indexes
     # Numerical datetime values are defined with respect to 1970-01-01T00:00:00 in units of nanoseconds
@@ -282,7 +287,7 @@ def get_clean_interp_index(arr, dim: Hashable, use_coordinate: Union[str, bool]
         # xarray/numpy raise a ValueError
         raise TypeError(
             f"Index {index.name!r} must be castable to float64 to support "
-            f"interpolation, got {type(index).__name__}."
+            f"interpolation or curve fitting, got {type(index).__name__}."
         )
 
     return index
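A minimal sketch of the new keyword on the internal helper (xarray.core.missing is not public API, so this mirrors the new test below rather than documented usage):

import xarray as xr
from xarray.core.missing import get_clean_interp_index

# Unsorted integer coordinate; strict=False returns it cast to float64
# instead of raising, while interpolation keeps the default strict=True.
da = xr.DataArray([10, 20, 30], dims=("x",), coords={"x": [0, 2, 1]})

index = get_clean_interp_index(da, "x", strict=False)
print(index)        # [0. 2. 1.]
print(index.dtype)  # float64
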
12 changes: 12 additions & 0 deletions xarray/tests/test_missing.py
@@ -534,6 +534,18 @@ def test_get_clean_interp_index_potential_overflow():
     get_clean_interp_index(da, "time")
 
 
+@pytest.mark.parametrize("index", ([0, 2, 1], [0, 1, 1]))
+def test_get_clean_interp_index_strict(index):
+    da = xr.DataArray([0, 1, 2], dims=("x",), coords={"x": index})
+
+    with pytest.raises(ValueError):
+        get_clean_interp_index(da, "x")
+
+    clean = get_clean_interp_index(da, "x", strict=False)
+    np.testing.assert_array_equal(index, clean)
+    assert clean.dtype == np.float64
+
+
 @pytest.fixture
 def da_time():
     return xr.DataArray(
