
Commit 6180152

[pre-commit.ci] pre-commit autoupdate (#8710)
* [pre-commit.ci] pre-commit autoupdate

  updates:
  - [github.com/astral-sh/ruff-pre-commit: v0.1.9 → v0.2.0](astral-sh/ruff-pre-commit@v0.1.9...v0.2.0)
  - [github.com/psf/black-pre-commit-mirror: 23.12.1 → 24.1.1](psf/black-pre-commit-mirror@23.12.1...24.1.1)

* [pre-commit.ci] auto fixes from pre-commit.com hooks

  for more information, see https://pre-commit.ci

* [pre-commit.ci] auto fixes from pre-commit.com hooks

  for more information, see https://pre-commit.ci

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
1 parent: c9ba2be · commit: 6180152


61 files changed: +403 −574 lines

.pre-commit-config.yaml

+3 −3

@@ -13,21 +13,21 @@ repos:
       - id: mixed-line-ending
   - repo: https://github.com/astral-sh/ruff-pre-commit
     # Ruff version.
-    rev: 'v0.1.9'
+    rev: 'v0.2.0'
     hooks:
       - id: ruff
         args: ["--fix", "--show-fixes"]
   # https://github.com/python/black#version-control-integration
   - repo: https://github.com/psf/black-pre-commit-mirror
-    rev: 23.12.1
+    rev: 24.1.1
     hooks:
       - id: black-jupyter
   - repo: https://github.com/keewis/blackdoc
     rev: v0.3.9
     hooks:
       - id: blackdoc
         exclude: "generate_aggregations.py"
-        additional_dependencies: ["black==23.12.1"]
+        additional_dependencies: ["black==24.1.1"]
       - id: blackdoc-autoupdate-black
   - repo: https://github.com/pre-commit/mirrors-mypy
     rev: v1.8.0
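
Most of the churn in the 61 files below follows from the Black 24.1.1 bump above rather than from hand-written edits: as the hunks show, the new stable style collapses class and function bodies that consist only of an ellipsis (...) onto the signature line. A minimal before/after sketch, reusing the stub names from the doc/contributing.rst hunk below; both spellings parse to the same AST, so only the layout changes:

# Black 23.12.x put the placeholder body on its own indented line.
class TestReallyCoolFeature:
    ...


def test_really_cool_feature():
    ...


# Black 24.1.x collapses stub-only bodies onto the signature line.
class TestReallyCoolFeature: ...


def test_really_cool_feature(): ...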

doc/contributing.rst

+2 −4

@@ -670,8 +670,7 @@ typically find tests wrapped in a class.
 
 .. code-block:: python
 
-    class TestReallyCoolFeature:
-        ...
+    class TestReallyCoolFeature: ...
 
 Going forward, we are moving to a more *functional* style using the
 `pytest <http://doc.pytest.org/en/latest/>`__ framework, which offers a richer
@@ -680,8 +679,7 @@ writing test classes, we will write test functions like this:
 
 .. code-block:: python
 
-    def test_really_cool_feature():
-        ...
+    def test_really_cool_feature(): ...
 
 Using ``pytest``
 ~~~~~~~~~~~~~~~~

doc/gallery/plot_cartopy_facetgrid.py

−1

@@ -13,7 +13,6 @@
 .. _this discussion: https://github.com/pydata/xarray/issues/1397#issuecomment-299190567
 """
 
-
 import cartopy.crs as ccrs
 import matplotlib.pyplot as plt
 

doc/gallery/plot_control_colorbar.py

+1

@@ -6,6 +6,7 @@
 Use ``cbar_kwargs`` keyword to specify the number of ticks.
 The ``spacing`` kwarg can be used to draw proportional ticks.
 """
+
 import matplotlib.pyplot as plt
 
 import xarray as xr
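
The one-line addition in this hunk repeats across many of the files below (properties/, xarray/backends/__init__.py, the xarray/coding modules, xarray/convert.py, xarray/core/_aggregations.py): Black 24 now separates a module docstring from the first statement that follows it with a blank line. A tiny sketch of the pattern, with an invented module body:

"""Hypothetical module docstring."""

# Black 24.1.x inserts the blank line above; Black 23.12.x left the import
# directly beneath the closing quotes of the docstring.
import math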

properties/test_encode_decode.py

+1

@@ -4,6 +4,7 @@
 These ones pass, just as you'd hope!
 
 """
+
 import pytest
 
 pytest.importorskip("hypothesis")

properties/test_pandas_roundtrip.py

+1

@@ -1,6 +1,7 @@
 """
 Property-based tests for roundtripping between xarray and pandas objects.
 """
+
 from functools import partial
 
 import numpy as np

xarray/backends/__init__.py

+1

@@ -3,6 +3,7 @@
 DataStores provide a uniform interface for saving and loading data in different
 formats. They should not be used directly, but rather through Dataset objects.
 """
+
 from xarray.backends.common import AbstractDataStore, BackendArray, BackendEntrypoint
 from xarray.backends.file_manager import (
     CachingFileManager,

xarray/backends/api.py

+18 −25

@@ -791,13 +791,15 @@ def open_dataarray(
 def open_mfdataset(
     paths: str | NestedSequence[str | os.PathLike],
     chunks: T_Chunks | None = None,
-    concat_dim: str
-    | DataArray
-    | Index
-    | Sequence[str]
-    | Sequence[DataArray]
-    | Sequence[Index]
-    | None = None,
+    concat_dim: (
+        str
+        | DataArray
+        | Index
+        | Sequence[str]
+        | Sequence[DataArray]
+        | Sequence[Index]
+        | None
+    ) = None,
     compat: CompatOptions = "no_conflicts",
     preprocess: Callable[[Dataset], Dataset] | None = None,
     engine: T_Engine | None = None,
@@ -1101,8 +1103,7 @@ def to_netcdf(
     *,
     multifile: Literal[True],
     invalid_netcdf: bool = False,
-) -> tuple[ArrayWriter, AbstractDataStore]:
-    ...
+) -> tuple[ArrayWriter, AbstractDataStore]: ...
 
 
 # path=None writes to bytes
@@ -1119,8 +1120,7 @@ def to_netcdf(
     compute: bool = True,
     multifile: Literal[False] = False,
     invalid_netcdf: bool = False,
-) -> bytes:
-    ...
+) -> bytes: ...
 
 
 # compute=False returns dask.Delayed
@@ -1138,8 +1138,7 @@ def to_netcdf(
     compute: Literal[False],
     multifile: Literal[False] = False,
     invalid_netcdf: bool = False,
-) -> Delayed:
-    ...
+) -> Delayed: ...
 
 
 # default return None
@@ -1156,8 +1155,7 @@ def to_netcdf(
     compute: Literal[True] = True,
     multifile: Literal[False] = False,
     invalid_netcdf: bool = False,
-) -> None:
-    ...
+) -> None: ...
 
 
 # if compute cannot be evaluated at type check time
@@ -1175,8 +1173,7 @@ def to_netcdf(
     compute: bool = False,
     multifile: Literal[False] = False,
     invalid_netcdf: bool = False,
-) -> Delayed | None:
-    ...
+) -> Delayed | None: ...
 
 
 # if multifile cannot be evaluated at type check time
@@ -1194,8 +1191,7 @@ def to_netcdf(
     compute: bool = False,
     multifile: bool = False,
     invalid_netcdf: bool = False,
-) -> tuple[ArrayWriter, AbstractDataStore] | Delayed | None:
-    ...
+) -> tuple[ArrayWriter, AbstractDataStore] | Delayed | None: ...
 
 
 # Any
@@ -1212,8 +1208,7 @@ def to_netcdf(
     compute: bool = False,
     multifile: bool = False,
     invalid_netcdf: bool = False,
-) -> tuple[ArrayWriter, AbstractDataStore] | bytes | Delayed | None:
-    ...
+) -> tuple[ArrayWriter, AbstractDataStore] | bytes | Delayed | None: ...
 
 
 def to_netcdf(
@@ -1647,8 +1642,7 @@ def to_zarr(
     zarr_version: int | None = None,
     write_empty_chunks: bool | None = None,
     chunkmanager_store_kwargs: dict[str, Any] | None = None,
-) -> backends.ZarrStore:
-    ...
+) -> backends.ZarrStore: ...
 
 
 # compute=False returns dask.Delayed
@@ -1671,8 +1665,7 @@ def to_zarr(
     zarr_version: int | None = None,
     write_empty_chunks: bool | None = None,
     chunkmanager_store_kwargs: dict[str, Any] | None = None,
-) -> Delayed:
-    ...
+) -> Delayed: ...
 
 
 def to_zarr(
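
api.py loses the most lines because its placeholder bodies belong to typing.overload stubs, each of which now fits on the signature line; the concat_dim hunk additionally shows long union annotations being wrapped in parentheses rather than broken at each "|". A small self-contained sketch of that overload pattern in the new layout (the export() function and its parameters are invented for illustration, not xarray's actual API):

from __future__ import annotations

from typing import Literal, overload


@overload
def export(data: bytes, *, compute: Literal[True] = True) -> None: ...


@overload
def export(data: bytes, *, compute: Literal[False]) -> str: ...


def export(data: bytes, *, compute: bool = True) -> str | None:
    # The stub signatures above exist only for the type checker; this body is
    # the one that runs: it returns None when compute is True and a token
    # describing the deferred work otherwise.
    return None if compute else f"deferred write of {len(data)} bytes"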

xarray/backends/locks.py

+3 −3

@@ -40,9 +40,9 @@ class SerializableLock:
     The creation of locks is itself not threadsafe.
     """
 
-    _locks: ClassVar[
-        WeakValueDictionary[Hashable, threading.Lock]
-    ] = WeakValueDictionary()
+    _locks: ClassVar[WeakValueDictionary[Hashable, threading.Lock]] = (
+        WeakValueDictionary()
+    )
     token: Hashable
     lock: threading.Lock
 
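
The locks.py hunk shows the same parenthesization preference applied to an over-long annotated assignment: the ClassVar[...] annotation now stays on one line and the right-hand side is wrapped in parentheses, where Black 23 split inside the subscript. A standalone sketch of that shape (the class name below is a stand-in, not xarray code; the attribute mirrors the statement in the hunk above):

from __future__ import annotations

import threading
from collections.abc import Hashable
from typing import ClassVar
from weakref import WeakValueDictionary


class LockRegistry:
    # The annotation stays on a single line; the value is parenthesized onto
    # the next line instead of the subscript being broken across lines.
    _locks: ClassVar[WeakValueDictionary[Hashable, threading.Lock]] = (
        WeakValueDictionary()
    )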

xarray/coding/cftime_offsets.py

+1

@@ -1,4 +1,5 @@
 """Time offset classes for use with cftime.datetime objects"""
+
 # The offset classes and mechanisms for generating time ranges defined in
 # this module were copied/adapted from those defined in pandas. See in
 # particular the objects and methods defined in pandas.tseries.offsets

xarray/coding/cftimeindex.py

+1

@@ -1,4 +1,5 @@
 """DatetimeIndex analog for cftime.datetime objects"""
+
 # The pandas.Index subclass defined here was copied and adapted for
 # use with cftime.datetime objects based on the source code defining
 # pandas.DatetimeIndex.

xarray/coding/frequencies.py

+1

@@ -1,4 +1,5 @@
 """FrequencyInferer analog for cftime.datetime objects"""
+
 # The infer_freq method and the _CFTimeFrequencyInferer
 # subclass defined here were copied and adapted for
 # use with cftime.datetime objects based on the source code in

xarray/coding/strings.py

+1

@@ -1,4 +1,5 @@
 """Coders for strings."""
+
 from __future__ import annotations
 
 from functools import partial

xarray/coding/variables.py

+1

@@ -1,4 +1,5 @@
 """Coders for individual Variable objects."""
+
 from __future__ import annotations
 
 import warnings

xarray/convert.py

+1

@@ -1,5 +1,6 @@
 """Functions for converting to and from xarray objects
 """
+
 from collections import Counter
 
 import numpy as np

xarray/core/_aggregations.py

+1

@@ -1,4 +1,5 @@
 """Mixin classes with reduction operations."""
+
 # This file was generated using xarray.util.generate_aggregations. Do not edit manually.
 
 from __future__ import annotations
