Upgrade ruff to 0.8.0 (#9816)
DimitriPapadopoulos authored Nov 25, 2024
1 parent 552a74b commit a765ae0
Showing 14 changed files with 70 additions and 83 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -25,7 +25,7 @@ repos:
- id: text-unicode-replacement-char
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.7.2
rev: v0.8.0
hooks:
- id: ruff-format
- id: ruff
2 changes: 1 addition & 1 deletion asv_bench/benchmarks/dataset_io.py
@@ -305,7 +305,7 @@ def make_ds(self, nfiles=10):
ds.attrs = {"history": "created for xarray benchmarking"}

self.ds_list.append(ds)
self.filenames_list.append("test_netcdf_%i.nc" % i)
self.filenames_list.append(f"test_netcdf_{i}.nc")


class IOWriteMultipleNetCDF3(IOMultipleNetCDF):
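The change above swaps printf-style `%` interpolation for an f-string, the kind of rewrite ruff's pyupgrade rule UP031 (printf-string-formatting) performs; the commit does not name the rule, so take that attribution as a reading of the diff. A minimal sketch of the equivalence, with `i` standing in for the loop index used in the benchmark:

```python
# Both forms build the same filename; the f-string is what the diff adopts.
i = 3
old = "test_netcdf_%i.nc" % i   # printf-style interpolation
new = f"test_netcdf_{i}.nc"     # f-string equivalent
assert old == new == "test_netcdf_3.nc"
```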
3 changes: 1 addition & 2 deletions pyproject.toml
@@ -45,7 +45,7 @@ dev = [
"pytest-env",
"pytest-xdist",
"pytest-timeout",
"ruff",
"ruff>=0.8.0",
"sphinx",
"sphinx_autosummary_accessors",
"xarray[complete]",
@@ -256,7 +256,6 @@ ignore = [
"E501", # line too long - let the formatter worry about that
"E731", # do not assign a lambda expression, use a def
"UP007", # use X | Y for type annotations
"UP027", # deprecated
"C40", # unnecessary generator, comprehension, or literal
"PIE790", # unnecessary pass statement
"PERF203", # try-except within a loop incurs performance overhead
6 changes: 3 additions & 3 deletions xarray/__init__.py
@@ -64,7 +64,7 @@

# A hardcoded __all__ variable is necessary to appease
# `mypy --strict` running in projects that import xarray.
__all__ = (
__all__ = ( # noqa: RUF022
# Sub-packages
"groupers",
"testing",
@@ -117,8 +117,8 @@
"Context",
"Coordinates",
"DataArray",
"Dataset",
"DataTree",
"Dataset",
"Index",
"IndexSelResult",
"IndexVariable",
@@ -131,6 +131,6 @@
"SerializationWarning",
"TreeIsomorphismError",
# Constants
"__version__",
"ALL_DIMS",
"__version__",
)
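The two swaps in `__all__` (`DataTree` before `Dataset`, `ALL_DIMS` before `__version__`) match plain case-sensitive string ordering, which is how ruff's RUF022 rule for sorting `__all__` reads here; the added `# noqa: RUF022` presumably keeps the hand-grouped layout from being rewritten wholesale. A small, purely illustrative check of that ordering:

```python
# Uppercase letters sort before lowercase ones, and "_" sorts after "A"-"Z",
# which accounts for both swaps shown in the hunk above.
print(sorted(["Dataset", "DataTree"]))      # ['DataTree', 'Dataset']    ('T' < 's')
print(sorted(["__version__", "ALL_DIMS"]))  # ['ALL_DIMS', '__version__']  ('A' < '_')
```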
3 changes: 1 addition & 2 deletions xarray/coding/cftime_offsets.py
@@ -1451,8 +1451,7 @@ def date_range_like(source, calendar, use_cftime=None):
from xarray.core.dataarray import DataArray

if not isinstance(source, pd.DatetimeIndex | CFTimeIndex) and (
isinstance(source, DataArray)
and (source.ndim != 1)
(isinstance(source, DataArray) and (source.ndim != 1))
or not _contains_datetime_like_objects(source.variable)
):
raise ValueError(
7 changes: 2 additions & 5 deletions xarray/conventions.py
@@ -726,11 +726,8 @@ def _encode_coordinates(
)

# if coordinates set to None, don't write coordinates attribute
if (
"coordinates" in attrs
and attrs.get("coordinates") is None
or "coordinates" in encoding
and encoding.get("coordinates") is None
if ("coordinates" in attrs and attrs.get("coordinates") is None) or (
"coordinates" in encoding and encoding.get("coordinates") is None
):
# make sure "coordinates" is removed from attrs/encoding
attrs.pop("coordinates", None)
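This hunk, like the ones in `cftime_offsets.py`, `dataset.py`, `plot/utils.py`, `testing/assertions.py`, and `test_backends.py`, wraps chained `and`/`or` conditions in explicit parentheses. The behaviour is unchanged because `and` already binds tighter than `or`; the parentheses only spell out that grouping (this reads like ruff's RUF021 parenthesize-chained-operators fix, though the commit does not say so). A minimal illustration:

```python
from itertools import product

# `and` binds tighter than `or`, so both spellings evaluate identically;
# the explicit parentheses merely state the grouping Python already uses.
def same_grouping(a: bool, b: bool, c: bool, d: bool) -> bool:
    implicit = a and b or c and d
    explicit = (a and b) or (c and d)
    return implicit == explicit

# Exhaustive check over all truth-value combinations (illustrative only).
assert all(same_grouping(*flags) for flags in product([False, True], repeat=4))
```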
8 changes: 3 additions & 5 deletions xarray/core/dataset.py
@@ -5401,11 +5401,9 @@ def _get_stack_index(
and var.dims[0] == dim
and (
# stack: must be a single coordinate index
not multi
and not self.xindexes.is_multi(name)
(not multi and not self.xindexes.is_multi(name))
# unstack: must be an index that implements .unstack
or multi
and type(index).unstack is not Index.unstack
or (multi and type(index).unstack is not Index.unstack)
)
):
if stack_index is not None and index is not stack_index:
@@ -7617,7 +7615,7 @@ def from_dataframe(cls, dataframe: pd.DataFrame, sparse: bool = False) -> Self:

if isinstance(idx, pd.MultiIndex):
dims = tuple(
name if name is not None else "level_%i" % n # type: ignore[redundant-expr]
name if name is not None else f"level_{n}" # type: ignore[redundant-expr]
for n, name in enumerate(idx.names)
)
for dim, lev in zip(dims, idx.levels, strict=True):
7 changes: 3 additions & 4 deletions xarray/plot/utils.py
@@ -869,11 +869,11 @@ def _infer_interval_breaks(coord, axis=0, scale=None, check_monotonic=False):
if check_monotonic and not _is_monotonic(coord, axis=axis):
raise ValueError(
"The input coordinate is not sorted in increasing "
"order along axis %d. This can lead to unexpected "
f"order along axis {axis}. This can lead to unexpected "
"results. Consider calling the `sortby` method on "
"the input DataArray. To plot data with categorical "
"axes, consider using the `heatmap` function from "
"the `seaborn` statistical plotting library." % axis
"the `seaborn` statistical plotting library."
)

# If logscale, compute the intervals in the logarithmic space
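Note that only the one concatenated literal that interpolates `axis` becomes an f-string above; Python's implicit concatenation of adjacent string literals lets the rest of the message stay plain. A small sketch of that pattern (message text abbreviated):

```python
axis = 0
# Adjacent literals are concatenated at compile time; only the piece that
# needs interpolation has to be an f-string.
message = (
    "The input coordinate is not sorted in increasing "
    f"order along axis {axis}. This can lead to unexpected "
    "results."
)
assert "axis 0" in message
```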
@@ -1708,8 +1708,7 @@ def _determine_guide(
if (
not add_colorbar
and (hueplt_norm.data is not None and hueplt_norm.data_is_numeric is False)
or sizeplt_norm.data is not None
):
) or sizeplt_norm.data is not None:
add_legend = True
else:
add_legend = False
8 changes: 4 additions & 4 deletions xarray/testing/assertions.py
@@ -124,8 +124,8 @@ def assert_equal(a, b, check_dim_order: bool = True):
numpy.testing.assert_array_equal
"""
__tracebackhide__ = True
assert (
type(a) is type(b) or isinstance(a, Coordinates) and isinstance(b, Coordinates)
assert type(a) is type(b) or (
isinstance(a, Coordinates) and isinstance(b, Coordinates)
)
b = maybe_transpose_dims(a, b, check_dim_order)
if isinstance(a, Variable | DataArray):
@@ -163,8 +163,8 @@ def assert_identical(a, b):
assert_equal, assert_allclose, Dataset.equals, DataArray.equals
"""
__tracebackhide__ = True
assert (
type(a) is type(b) or isinstance(a, Coordinates) and isinstance(b, Coordinates)
assert type(a) is type(b) or (
isinstance(a, Coordinates) and isinstance(b, Coordinates)
)
if isinstance(a, Variable):
assert a.identical(b), formatting.diff_array_repr(a, b, "identical")
3 changes: 1 addition & 2 deletions xarray/tests/__init__.py
@@ -210,8 +210,7 @@ def __call__(self, dsk, keys, **kwargs):
self.total_computes += 1
if self.total_computes > self.max_computes:
raise RuntimeError(
"Too many computes. Total: %d > max: %d."
% (self.total_computes, self.max_computes)
f"Too many computes. Total: {self.total_computes} > max: {self.max_computes}."
)
return dask.get(dsk, keys, **kwargs)

3 changes: 1 addition & 2 deletions xarray/tests/test_backends.py
@@ -963,8 +963,7 @@ def test_roundtrip_mask_and_scale(self, decoded_fn, encoded_fn, dtype) -> None:
decoded = decoded_fn(dtype)
encoded = encoded_fn(dtype)
if decoded["x"].encoding["dtype"] == "u1" and not (
self.engine == "netcdf4"
and self.file_format is None
(self.engine == "netcdf4" and self.file_format is None)
or self.file_format == "NETCDF4"
):
pytest.skip("uint8 data can't be written to non-NetCDF4 data")
5 changes: 2 additions & 3 deletions xarray/tests/test_dataset.py
@@ -388,16 +388,15 @@ def test_unicode_data(self) -> None:

byteorder = "<" if sys.byteorder == "little" else ">"
expected = dedent(
"""\
f"""\
<xarray.Dataset> Size: 12B
Dimensions: (foø: 1)
Coordinates:
* foø (foø) %cU3 12B %r
* foø (foø) {byteorder}U3 12B {'ba®'!r}
Data variables:
*empty*
Attributes:
å: ∑"""
% (byteorder, "ba®")
)
actual = str(data)
assert expected == actual
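The expected repr above is now built with an f-string: `{byteorder}` replaces the `%c` conversion and `{'ba®'!r}` replaces `%r`. A tiny sketch of the `!r` equivalence, assuming only the formatting style (not the expected output) changed:

```python
byteorder = "<"
value = "ba®"
old = "* foø    (foø) %cU3 12B %r" % (byteorder, value)
new = f"* foø    (foø) {byteorder}U3 12B {value!r}"
assert old == new  # both render the value via repr(), e.g. 'ba®'
```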
14 changes: 6 additions & 8 deletions xarray/tests/test_formatting.py
@@ -295,7 +295,7 @@ def test_diff_array_repr(self) -> None:

byteorder = "<" if sys.byteorder == "little" else ">"
expected = dedent(
"""\
f"""\
Left and right DataArray objects are not identical
Differing dimensions:
(x: 2, y: 3) != (x: 2)
@@ -306,8 +306,8 @@ def test_diff_array_repr(self) -> None:
R
array([1, 2], dtype=int64)
Differing coordinates:
L * x (x) %cU1 8B 'a' 'b'
R * x (x) %cU1 8B 'a' 'c'
L * x (x) {byteorder}U1 8B 'a' 'b'
R * x (x) {byteorder}U1 8B 'a' 'c'
Coordinates only on the left object:
* y (y) int64 24B 1 2 3
Coordinates only on the right object:
@@ -317,7 +317,6 @@
R units: kg
Attributes only on the left object:
description: desc"""
% (byteorder, byteorder)
)

actual = formatting.diff_array_repr(da_a, da_b, "identical")
@@ -496,15 +495,15 @@ def test_diff_dataset_repr(self) -> None:

byteorder = "<" if sys.byteorder == "little" else ">"
expected = dedent(
"""\
f"""\
Left and right Dataset objects are not identical
Differing dimensions:
(x: 2, y: 3) != (x: 2)
Differing coordinates:
L * x (x) %cU1 8B 'a' 'b'
L * x (x) {byteorder}U1 8B 'a' 'b'
Differing variable attributes:
foo: bar
R * x (x) %cU1 8B 'a' 'c'
R * x (x) {byteorder}U1 8B 'a' 'c'
Differing variable attributes:
source: 0
foo: baz
Expand All @@ -522,7 +521,6 @@ def test_diff_dataset_repr(self) -> None:
R title: newtitle
Attributes only on the left object:
description: desc"""
% (byteorder, byteorder)
)

actual = formatting.diff_dataset_repr(ds_a, ds_b, "identical")
82 changes: 41 additions & 41 deletions xarray/ufuncs.py
@@ -247,70 +247,45 @@ def _dedent(doc):
"absolute",
"acos",
"acosh",
"add",
"angle",
"arccos",
"arccosh",
"arcsin",
"arcsinh",
"arctan",
"arctan2",
"arctanh",
"asin",
"asinh",
"atan",
"atan2",
"atanh",
"bitwise_and",
"bitwise_count",
"bitwise_invert",
"bitwise_left_shift",
"bitwise_not",
"bitwise_or",
"bitwise_right_shift",
"bitwise_xor",
"cbrt",
"ceil",
"conj",
"conjugate",
"copysign",
"cos",
"cosh",
"deg2rad",
"degrees",
"divide",
"equal",
"exp",
"exp2",
"expm1",
"fabs",
"floor",
"invert",
"isfinite",
"isinf",
"isnan",
"isnat",
"log",
"log10",
"log1p",
"log2",
"logical_not",
"negative",
"positive",
"rad2deg",
"radians",
"reciprocal",
"rint",
"sign",
"signbit",
"sin",
"sinh",
"spacing",
"sqrt",
"square",
"tan",
"tanh",
"trunc",
"add",
"arctan2",
"atan2",
"bitwise_and",
"bitwise_left_shift",
"bitwise_or",
"bitwise_right_shift",
"bitwise_xor",
"copysign",
"divide",
"equal",
"float_power",
"floor",
"floor_divide",
"fmax",
"fmin",
@@ -320,29 +295,54 @@ def _dedent(doc):
"greater_equal",
"heaviside",
"hypot",
"invert",
"iscomplex",
"isfinite",
"isinf",
"isnan",
"isnat",
"isreal",
"lcm",
"ldexp",
"left_shift",
"less",
"less_equal",
"log",
"log1p",
"log2",
"log10",
"logaddexp",
"logaddexp2",
"logical_and",
"logical_not",
"logical_or",
"logical_xor",
"maximum",
"minimum",
"mod",
"multiply",
"negative",
"nextafter",
"not_equal",
"positive",
"pow",
"power",
"rad2deg",
"radians",
"reciprocal",
"remainder",
"right_shift",
"rint",
"sign",
"signbit",
"sin",
"sinh",
"spacing",
"sqrt",
"square",
"subtract",
"tan",
"tanh",
"true_divide",
"angle",
"isreal",
"iscomplex",
"trunc",
]
