
Commit

pre-commit run --all
etienneschalk committed Feb 26, 2024
1 parent 2e3aa83 commit 93ef61c
Showing 37 changed files with 499 additions and 272 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -13,7 +13,7 @@ repos:
- id: mixed-line-ending
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: 'v0.2.1'
rev: 'v0.2.2'
hooks:
- id: ruff
args: ["--fix", "--show-fixes"]
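The only change in this file is bumping the Ruff hook from v0.2.1 to v0.2.2. As an illustration that is not part of the commit, the same check can be run outside of pre-commit with the flags the hook passes, assuming a Ruff 0.2.x executable is on PATH; in practice `pre-commit run --all-files` does this for you, with pre-commit managing the pinned version:

```python
# Sketch only: invoke Ruff directly with the same flags the pre-commit hook
# passes ("--fix", "--show-fixes"). Assumes `ruff` 0.2.x is installed and on
# PATH; pre-commit would normally manage this environment for you.
import subprocess

result = subprocess.run(
    ["ruff", "check", "--fix", "--show-fixes", "."],
    capture_output=True,
    text=True,
)
print(result.stdout)                    # summary of applied fixes, if any
print("exit code:", result.returncode)  # non-zero when unfixable issues remain
```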
1 change: 0 additions & 1 deletion asv_bench/benchmarks/__init__.py
@@ -67,7 +67,6 @@ def _skip_slow():
>>> from . import _skip_slow
>>> def time_something_slow():
... pass
...
>>> time_something.setup = _skip_slow
"""
if os.environ.get("ASV_SKIP_SLOW", "0") == "1":
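This hunk only removes a stray `...` continuation line from the doctest. For context, a sketch of how an `ASV_SKIP_SLOW` guard of this kind can work is below; the `NotImplementedError` body relies on asv's convention of skipping benchmarks whose setup raises it, and is not copied from xarray's actual helper:

```python
# Hypothetical sketch of an environment-driven benchmark skip, modelled on
# the docstring above. asv treats NotImplementedError raised in a setup hook
# as "skip this benchmark"; the real _skip_slow body may differ.
import os


def _skip_slow() -> None:
    if os.environ.get("ASV_SKIP_SLOW", "0") == "1":
        raise NotImplementedError("Skipping this benchmark")


def time_something_slow() -> None:
    pass


# Attach the guard as the benchmark's setup hook, as the doctest above does.
time_something_slow.setup = _skip_slow
```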
3 changes: 2 additions & 1 deletion doc/examples/apply_ufunc_vectorize_1d.ipynb
@@ -460,7 +460,8 @@
"interped = interped.rename({\"new_lat\": \"lat\"})\n",
"interped[\"lat\"] = newlat # need to add this manually\n",
"xr.testing.assert_allclose(\n",
" expected.transpose(*interped.dims), interped # order of dims is different\n",
" expected.transpose(*interped.dims),\n",
" interped, # order of dims is different\n",
")\n",
"interped"
]
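The notebook cell is only re-wrapped here. The underlying pattern, transposing one operand into the other's dimension order before `xr.testing.assert_allclose`, is shown below with small hypothetical arrays rather than the notebook's interpolation output:

```python
# Self-contained sketch of the comparison pattern used in the notebook:
# transpose `expected` into `interped`'s dimension order before asserting
# closeness, since the comparison cares about matching dimension order.
import numpy as np
import xarray as xr

data = np.arange(6.0).reshape(2, 3)
expected = xr.DataArray(data, dims=("lat", "lon"))
interped = xr.DataArray(data.T, dims=("lon", "lat"))  # same values, dims swapped

xr.testing.assert_allclose(
    expected.transpose(*interped.dims),
    interped,  # order of dims is different
)
```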
27 changes: 18 additions & 9 deletions xarray/backends/api.py
@@ -1132,7 +1132,8 @@ def to_netcdf(
*,
multifile: Literal[True],
invalid_netcdf: bool = False,
) -> tuple[ArrayWriter, AbstractDataStore]: ...
) -> tuple[ArrayWriter, AbstractDataStore]:
...


# path=None writes to bytes
@@ -1149,7 +1150,8 @@ def to_netcdf(
compute: bool = True,
multifile: Literal[False] = False,
invalid_netcdf: bool = False,
) -> bytes: ...
) -> bytes:
...


# compute=False returns dask.Delayed
@@ -1167,7 +1169,8 @@ def to_netcdf(
compute: Literal[False],
multifile: Literal[False] = False,
invalid_netcdf: bool = False,
) -> Delayed: ...
) -> Delayed:
...


# default return None
@@ -1184,7 +1187,8 @@ def to_netcdf(
compute: Literal[True] = True,
multifile: Literal[False] = False,
invalid_netcdf: bool = False,
) -> None: ...
) -> None:
...


# if compute cannot be evaluated at type check time
@@ -1202,7 +1206,8 @@ def to_netcdf(
compute: bool = False,
multifile: Literal[False] = False,
invalid_netcdf: bool = False,
) -> Delayed | None: ...
) -> Delayed | None:
...


# if multifile cannot be evaluated at type check time
@@ -1220,7 +1225,8 @@ def to_netcdf(
compute: bool = False,
multifile: bool = False,
invalid_netcdf: bool = False,
) -> tuple[ArrayWriter, AbstractDataStore] | Delayed | None: ...
) -> tuple[ArrayWriter, AbstractDataStore] | Delayed | None:
...


# Any
@@ -1237,7 +1243,8 @@ def to_netcdf(
compute: bool = False,
multifile: bool = False,
invalid_netcdf: bool = False,
) -> tuple[ArrayWriter, AbstractDataStore] | bytes | Delayed | None: ...
) -> tuple[ArrayWriter, AbstractDataStore] | bytes | Delayed | None:
...


def to_netcdf(
@@ -1671,7 +1678,8 @@ def to_zarr(
zarr_version: int | None = None,
write_empty_chunks: bool | None = None,
chunkmanager_store_kwargs: dict[str, Any] | None = None,
) -> backends.ZarrStore: ...
) -> backends.ZarrStore:
...


# compute=False returns dask.Delayed
@@ -1694,7 +1702,8 @@ def to_zarr(
zarr_version: int | None = None,
write_empty_chunks: bool | None = None,
chunkmanager_store_kwargs: dict[str, Any] | None = None,
) -> Delayed: ...
) -> Delayed:
...


def to_zarr(
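Every hunk in this file is the same mechanical reflow, moving each `@overload` stub's `...` body onto its own line. For readers unfamiliar with the pattern these stubs use, a minimal, self-contained illustration of `Literal`-dependent return types follows; the function name and signature are invented for the example and are not xarray's API:

```python
# Toy illustration of the overload pattern reformatted above: the declared
# return type depends on a Literal-typed keyword, while a single untyped
# implementation does the real work at runtime.
from __future__ import annotations

from typing import Literal, overload


@overload
def serialize(data: bytes, *, lazy: Literal[True]) -> None:
    ...


@overload
def serialize(data: bytes, *, lazy: Literal[False] = False) -> bytes:
    ...


def serialize(data: bytes, *, lazy: bool = False) -> bytes | None:
    # Only type checkers consume the overloads above; runtime sees this body.
    return None if lazy else data


payload = serialize(b"abc")              # a checker infers bytes
deferred = serialize(b"abc", lazy=True)  # a checker infers None
```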
7 changes: 2 additions & 5 deletions xarray/backends/common.py
@@ -235,13 +235,10 @@ def load(self):
For example::
class SuffixAppendingDataStore(AbstractDataStore):
def load(self):
variables, attributes = AbstractDataStore.load(self)
variables = {'%s_suffix' % k: v
for k, v in variables.items()}
attributes = {'%s_suffix' % k: v
for k, v in attributes.items()}
variables = {"%s_suffix" % k: v for k, v in variables.items()}
attributes = {"%s_suffix" % k: v for k, v in attributes.items()}
return variables, attributes
This function will be called anytime variables or attributes
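The docstring change above only joins the dict comprehensions onto single lines. The pattern it documents can be exercised without a real backend; the sketch below substitutes a stand-in base class for `AbstractDataStore`, so all names are illustrative only:

```python
# Standalone mock of the pattern in the docstring: a subclass post-processes
# whatever the parent's load() returns. FakeDataStore stands in for
# AbstractDataStore and is not xarray code.
class FakeDataStore:
    def load(self):
        variables = {"temperature": [1, 2, 3]}
        attributes = {"title": "demo"}
        return variables, attributes


class SuffixAppendingDataStore(FakeDataStore):
    def load(self):
        variables, attributes = super().load()
        variables = {"%s_suffix" % k: v for k, v in variables.items()}
        attributes = {"%s_suffix" % k: v for k, v in attributes.items()}
        return variables, attributes


print(SuffixAppendingDataStore().load())
# ({'temperature_suffix': [1, 2, 3]}, {'title_suffix': 'demo'})
```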
6 changes: 3 additions & 3 deletions xarray/backends/locks.py
@@ -40,9 +40,9 @@ class SerializableLock:
The creation of locks is itself not threadsafe.
"""

_locks: ClassVar[WeakValueDictionary[Hashable, threading.Lock]] = (
WeakValueDictionary()
)
_locks: ClassVar[
WeakValueDictionary[Hashable, threading.Lock]
] = WeakValueDictionary()
token: Hashable
lock: threading.Lock

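Only the wrapping of the `_locks` class-variable annotation changes here. The idea behind that attribute, namely that instances created with the same token share one underlying `threading.Lock`, is sketched below in simplified form; the real `SerializableLock` uses a `WeakValueDictionary` (so unused locks can be garbage-collected) and supports pickling, both of which this toy version omits:

```python
# Simplified sketch of the token -> shared-lock registry that the annotation
# above describes. Unlike the real class, this uses an ordinary dict (no weak
# references) and has no serialization support.
from __future__ import annotations

import threading
import uuid
from collections.abc import Hashable
from typing import ClassVar


class TokenLock:
    _locks: ClassVar[dict[Hashable, threading.Lock]] = {}

    def __init__(self, token: Hashable | None = None) -> None:
        self.token = token if token is not None else str(uuid.uuid4())
        # Instances sharing a token share the same underlying lock object.
        self.lock = self._locks.setdefault(self.token, threading.Lock())

    def __enter__(self) -> TokenLock:
        self.lock.acquire()
        return self

    def __exit__(self, *exc_info) -> None:
        self.lock.release()


a = TokenLock("netcdf-io")
b = TokenLock("netcdf-io")
assert a.lock is b.lock  # same token -> same threading.Lock
```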
4 changes: 2 additions & 2 deletions xarray/backends/plugins.py
@@ -82,7 +82,7 @@ def backends_dict_from_pkg(


def set_missing_parameters(
backend_entrypoints: dict[str, type[BackendEntrypoint]]
backend_entrypoints: dict[str, type[BackendEntrypoint]],
) -> None:
for _, backend in backend_entrypoints.items():
if backend.open_dataset_parameters is None:
@@ -91,7 +91,7 @@ def set_missing_parameters(


def sort_backends(
backend_entrypoints: dict[str, type[BackendEntrypoint]]
backend_entrypoints: dict[str, type[BackendEntrypoint]],
) -> dict[str, type[BackendEntrypoint]]:
ordered_backends_entrypoints = {}
for be_name in STANDARD_BACKENDS_ORDER:
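The formatter only adds a trailing comma after each function's single parameter here. For context, `sort_backends` puts xarray's standard backends ahead of the rest; the sketch below captures that idea, with the constant's value and the alphabetical fallback both assumed for illustration rather than taken from the source:

```python
# Sketch of the reordering idea behind sort_backends: entries named in a
# preferred-order list come first, everything else follows. The constant's
# value and the sorted fallback are assumptions for illustration.
STANDARD_BACKENDS_ORDER = ["netcdf4", "h5netcdf", "scipy"]


def sort_backends_sketch(backend_entrypoints: dict[str, object]) -> dict[str, object]:
    ordered: dict[str, object] = {}
    for name in STANDARD_BACKENDS_ORDER:
        if name in backend_entrypoints:
            ordered[name] = backend_entrypoints[name]
    for name in sorted(backend_entrypoints):
        ordered.setdefault(name, backend_entrypoints[name])
    return ordered


print(list(sort_backends_sketch({"zarr": ..., "scipy": ..., "netcdf4": ...})))
# ['netcdf4', 'scipy', 'zarr']
```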
108 changes: 72 additions & 36 deletions xarray/core/_typed_ops.py
@@ -455,163 +455,199 @@ def _binary_op(
raise NotImplementedError

@overload
def __add__(self, other: T_DataArray) -> T_DataArray: ...
def __add__(self, other: T_DataArray) -> T_DataArray:
...

@overload
def __add__(self, other: VarCompatible) -> Self: ...
def __add__(self, other: VarCompatible) -> Self:
...

def __add__(self, other: VarCompatible) -> Self | T_DataArray:
return self._binary_op(other, operator.add)

@overload
def __sub__(self, other: T_DataArray) -> T_DataArray: ...
def __sub__(self, other: T_DataArray) -> T_DataArray:
...

@overload
def __sub__(self, other: VarCompatible) -> Self: ...
def __sub__(self, other: VarCompatible) -> Self:
...

def __sub__(self, other: VarCompatible) -> Self | T_DataArray:
return self._binary_op(other, operator.sub)

@overload
def __mul__(self, other: T_DataArray) -> T_DataArray: ...
def __mul__(self, other: T_DataArray) -> T_DataArray:
...

@overload
def __mul__(self, other: VarCompatible) -> Self: ...
def __mul__(self, other: VarCompatible) -> Self:
...

def __mul__(self, other: VarCompatible) -> Self | T_DataArray:
return self._binary_op(other, operator.mul)

@overload
def __pow__(self, other: T_DataArray) -> T_DataArray: ...
def __pow__(self, other: T_DataArray) -> T_DataArray:
...

@overload
def __pow__(self, other: VarCompatible) -> Self: ...
def __pow__(self, other: VarCompatible) -> Self:
...

def __pow__(self, other: VarCompatible) -> Self | T_DataArray:
return self._binary_op(other, operator.pow)

@overload
def __truediv__(self, other: T_DataArray) -> T_DataArray: ...
def __truediv__(self, other: T_DataArray) -> T_DataArray:
...

@overload
def __truediv__(self, other: VarCompatible) -> Self: ...
def __truediv__(self, other: VarCompatible) -> Self:
...

def __truediv__(self, other: VarCompatible) -> Self | T_DataArray:
return self._binary_op(other, operator.truediv)

@overload
def __floordiv__(self, other: T_DataArray) -> T_DataArray: ...
def __floordiv__(self, other: T_DataArray) -> T_DataArray:
...

@overload
def __floordiv__(self, other: VarCompatible) -> Self: ...
def __floordiv__(self, other: VarCompatible) -> Self:
...

def __floordiv__(self, other: VarCompatible) -> Self | T_DataArray:
return self._binary_op(other, operator.floordiv)

@overload
def __mod__(self, other: T_DataArray) -> T_DataArray: ...
def __mod__(self, other: T_DataArray) -> T_DataArray:
...

@overload
def __mod__(self, other: VarCompatible) -> Self: ...
def __mod__(self, other: VarCompatible) -> Self:
...

def __mod__(self, other: VarCompatible) -> Self | T_DataArray:
return self._binary_op(other, operator.mod)

@overload
def __and__(self, other: T_DataArray) -> T_DataArray: ...
def __and__(self, other: T_DataArray) -> T_DataArray:
...

@overload
def __and__(self, other: VarCompatible) -> Self: ...
def __and__(self, other: VarCompatible) -> Self:
...

def __and__(self, other: VarCompatible) -> Self | T_DataArray:
return self._binary_op(other, operator.and_)

@overload
def __xor__(self, other: T_DataArray) -> T_DataArray: ...
def __xor__(self, other: T_DataArray) -> T_DataArray:
...

@overload
def __xor__(self, other: VarCompatible) -> Self: ...
def __xor__(self, other: VarCompatible) -> Self:
...

def __xor__(self, other: VarCompatible) -> Self | T_DataArray:
return self._binary_op(other, operator.xor)

@overload
def __or__(self, other: T_DataArray) -> T_DataArray: ...
def __or__(self, other: T_DataArray) -> T_DataArray:
...

@overload
def __or__(self, other: VarCompatible) -> Self: ...
def __or__(self, other: VarCompatible) -> Self:
...

def __or__(self, other: VarCompatible) -> Self | T_DataArray:
return self._binary_op(other, operator.or_)

@overload
def __lshift__(self, other: T_DataArray) -> T_DataArray: ...
def __lshift__(self, other: T_DataArray) -> T_DataArray:
...

@overload
def __lshift__(self, other: VarCompatible) -> Self: ...
def __lshift__(self, other: VarCompatible) -> Self:
...

def __lshift__(self, other: VarCompatible) -> Self | T_DataArray:
return self._binary_op(other, operator.lshift)

@overload
def __rshift__(self, other: T_DataArray) -> T_DataArray: ...
def __rshift__(self, other: T_DataArray) -> T_DataArray:
...

@overload
def __rshift__(self, other: VarCompatible) -> Self: ...
def __rshift__(self, other: VarCompatible) -> Self:
...

def __rshift__(self, other: VarCompatible) -> Self | T_DataArray:
return self._binary_op(other, operator.rshift)

@overload
def __lt__(self, other: T_DataArray) -> T_DataArray: ...
def __lt__(self, other: T_DataArray) -> T_DataArray:
...

@overload
def __lt__(self, other: VarCompatible) -> Self: ...
def __lt__(self, other: VarCompatible) -> Self:
...

def __lt__(self, other: VarCompatible) -> Self | T_DataArray:
return self._binary_op(other, operator.lt)

@overload
def __le__(self, other: T_DataArray) -> T_DataArray: ...
def __le__(self, other: T_DataArray) -> T_DataArray:
...

@overload
def __le__(self, other: VarCompatible) -> Self: ...
def __le__(self, other: VarCompatible) -> Self:
...

def __le__(self, other: VarCompatible) -> Self | T_DataArray:
return self._binary_op(other, operator.le)

@overload
def __gt__(self, other: T_DataArray) -> T_DataArray: ...
def __gt__(self, other: T_DataArray) -> T_DataArray:
...

@overload
def __gt__(self, other: VarCompatible) -> Self: ...
def __gt__(self, other: VarCompatible) -> Self:
...

def __gt__(self, other: VarCompatible) -> Self | T_DataArray:
return self._binary_op(other, operator.gt)

@overload
def __ge__(self, other: T_DataArray) -> T_DataArray: ...
def __ge__(self, other: T_DataArray) -> T_DataArray:
...

@overload
def __ge__(self, other: VarCompatible) -> Self: ...
def __ge__(self, other: VarCompatible) -> Self:
...

def __ge__(self, other: VarCompatible) -> Self | T_DataArray:
return self._binary_op(other, operator.ge)

@overload # type:ignore[override]
def __eq__(self, other: T_DataArray) -> T_DataArray: ...
def __eq__(self, other: T_DataArray) -> T_DataArray:
...

@overload
def __eq__(self, other: VarCompatible) -> Self: ...
def __eq__(self, other: VarCompatible) -> Self:
...

def __eq__(self, other: VarCompatible) -> Self | T_DataArray:
return self._binary_op(other, nputils.array_eq)

@overload # type:ignore[override]
def __ne__(self, other: T_DataArray) -> T_DataArray: ...
def __ne__(self, other: T_DataArray) -> T_DataArray:
...

@overload
def __ne__(self, other: VarCompatible) -> Self: ...
def __ne__(self, other: VarCompatible) -> Self:
...

def __ne__(self, other: VarCompatible) -> Self | T_DataArray:
return self._binary_op(other, nputils.array_ne)
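The remainder of this file repeats the same stub reflow shown above. The structure the overloads decorate, with every binary dunder delegating to one `_binary_op` helper parametrised by a function from the `operator` module, is shown in miniature below with a toy class that is not part of xarray:

```python
# Toy version of the dispatch pattern above: each arithmetic dunder routes
# through a single _binary_op helper built on the operator module.
import operator
from collections.abc import Callable


class Boxed:
    def __init__(self, value: float) -> None:
        self.value = value

    def _binary_op(self, other: "Boxed | float", f: Callable) -> "Boxed":
        other_value = other.value if isinstance(other, Boxed) else other
        return Boxed(f(self.value, other_value))

    def __add__(self, other: "Boxed | float") -> "Boxed":
        return self._binary_op(other, operator.add)

    def __sub__(self, other: "Boxed | float") -> "Boxed":
        return self._binary_op(other, operator.sub)

    def __mul__(self, other: "Boxed | float") -> "Boxed":
        return self._binary_op(other, operator.mul)


print((Boxed(2) + 3).value)          # 5
print((Boxed(2) * Boxed(4)).value)   # 8
```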
