
Commit 17a245a

flake8, bugbear, pyupgrade → ruff (#12002)
Co-authored-by: Jacob Tomlinson <jacobtomlinson@users.noreply.github.com>
1 parent a9d0e52 commit 17a245a

21 files changed: +65 −86 lines

.flake8

Lines changed: 0 additions & 41 deletions
This file was deleted.

.pre-commit-config.yaml

Lines changed: 4 additions & 15 deletions
@@ -9,33 +9,22 @@ repos:
     hooks:
       - id: absolufy-imports
         name: absolufy-imports
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: v0.14.2
+    hooks:
+      - id: ruff-check
   - repo: https://github.com/pycqa/isort
     rev: 6.0.1
     hooks:
       - id: isort
         language_version: python3
-  - repo: https://github.com/asottile/pyupgrade
-    rev: v3.19.1
-    hooks:
-      - id: pyupgrade
-        args:
-          - --py310-plus
   - repo: https://github.com/psf/black
     rev: 25.1.0
     hooks:
       - id: black
         language_version: python3
         args:
           - --target-version=py310
-  - repo: https://github.com/pycqa/flake8
-    rev: 7.2.0
-    hooks:
-      - id: flake8
-        language_version: python3
-        additional_dependencies:
-          # NOTE: autoupdate does not pick up flake8-bugbear since it is a transitive
-          # dependency. Make sure to update flake8-bugbear manually on a regular basis.
-          - flake8-bugbear==24.8.19
   - repo: https://github.com/codespell-project/codespell
     rev: v2.4.1
     hooks:

dask/array/linalg.py

Lines changed: 1 addition & 1 deletion
@@ -1256,7 +1256,7 @@ def solve(a, b, sym_pos=None, assume_a="gen"):
         b = p.T.dot(b)
     else:
         raise ValueError(
-            f"{assume_a = } is not a recognized matrix structure, "  # noqa: E251
+            f"{assume_a = } is not a recognized matrix structure, "
             "valid structures in Dask are 'pos' and 'gen'."
         )

dask/array/numpy_compat.py

Lines changed: 1 addition & 1 deletion
@@ -18,7 +18,7 @@


 if NUMPY_GE_200:
-    from numpy.exceptions import AxisError, ComplexWarning  # noqa: F401
+    from numpy.exceptions import AxisError, ComplexWarning
     from numpy.lib.array_utils import normalize_axis_index, normalize_axis_tuple
 else:
     from numpy import (  # type: ignore[no-redef, attr-defined] # noqa: F401

dask/array/routines.py

Lines changed: 2 additions & 2 deletions
@@ -29,7 +29,7 @@
     tensordot_lookup,
 )
 from dask.array.creation import arange, diag, empty, indices, tri
-from dask.array.einsumfuncs import einsum  # noqa
+from dask.array.einsumfuncs import einsum  # noqa: F401
 from dask.array.numpy_compat import NUMPY_GE_200
 from dask.array.reductions import reduction
 from dask.array.ufunc import multiply, sqrt, true_divide
@@ -2086,7 +2086,7 @@ def _asarray_isnull(values):
 def isnull(values):
     """pandas.isnull for dask arrays"""
     # eagerly raise ImportError, if pandas isn't available
-    import pandas as pd  # noqa
+    import pandas as pd  # noqa: F401

     return elemwise(_asarray_isnull, values, dtype="bool")
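
Both routines.py edits keep imports that exist only for their side effects (re-exporting einsum, and failing fast when pandas is missing), so only pyflakes' F401 ("imported but unused") needs to be silenced rather than every rule on the line, as a bare # noqa would do. A minimal sketch of the pattern, with a hypothetical helper name and assuming pandas is installed:

def _require_pandas() -> None:
    # Imported purely to raise ImportError early when pandas is absent, the
    # same pattern as dask.array.routines.isnull above. Pyflakes would flag
    # the unused name as F401, so exactly that rule is suppressed; a bare
    # "# noqa" would also hide any unrelated problem on the same line.
    import pandas as pd  # noqa: F401


_require_pandas()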

dask/array/stats.py

Lines changed: 2 additions & 2 deletions
@@ -242,7 +242,7 @@ def skew(a, axis=0, bias=True, nan_policy="propagate"):
             "`nan_policy` other than 'propagate' have not been implemented."
         )

-    n = a.shape[axis]  # noqa; for bias
+    n = a.shape[axis]  # noqa: F841 # for bias
     m2 = moment(a, 2, axis)
     m3 = moment(a, 3, axis)
     zero = m2 == 0
@@ -298,7 +298,7 @@ def kurtosis(a, axis=0, fisher=True, bias=True, nan_policy="propagate"):
         raise NotImplementedError(
             "`nan_policy` other than 'propagate' have not been implemented."
         )
-    n = a.shape[axis]  # noqa; for bias
+    n = a.shape[axis]  # noqa: F841 # for bias
     m2 = moment(a, 2, axis)
     m4 = moment(a, 4, axis)
     zero = m2 == 0
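
In both skew and kurtosis, n is assigned but never read (the comment notes it is kept "for bias"), so pyflakes reports F841, "local variable is assigned to but never used". The rewritten comment pins the suppression to that single code and keeps the human note after a second "#". A self-contained sketch of the same shape, using plain NumPy and a hypothetical helper name:

import numpy as np


def _second_moment(a: np.ndarray, axis: int = 0) -> np.ndarray:
    # "n" is assigned but never read, so ruff/pyflakes flag it as F841 unless
    # exactly that rule is suppressed; "# for bias" stays an ordinary comment
    # after a second "#".
    n = a.shape[axis]  # noqa: F841 # for bias
    return np.mean((a - a.mean(axis=axis, keepdims=True)) ** 2, axis=axis)


print(_second_moment(np.arange(12.0).reshape(3, 4)))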

dask/dataframe/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -14,7 +14,7 @@ def _dask_expr_enabled() -> bool:
 _dask_expr_enabled()


-import dask.array._array_expr._backends  # Import this to register array dispatch  # noqa: F401
+import dask.array._array_expr._backends  # Import this to register array dispatch

 # Ensure that dtypes are registered
 import dask.dataframe._dtypes

dask/dataframe/dask_expr/_repartition.py

Lines changed: 1 addition & 1 deletion
@@ -459,7 +459,7 @@ def _divisions(self):

     def _lower(self):
         # populate cache
-        self._mem_usage  # noqa
+        self._mem_usage
         return super()._lower()

     def _layer(self) -> dict:

dask/dataframe/dask_expr/io/parquet.py

Lines changed: 1 addition & 3 deletions
@@ -1843,9 +1843,7 @@ def _divisions_from_statistics(aggregated_stats, index_name):
             col_ix = ix
             break
     else:
-        raise ValueError(
-            f"Index column {index_name} not found in statistics"  # noqa: E713
-        )
+        raise ValueError(f"Index column {index_name} not found in statistics")
     last_max = None
     minmax = []
     for file_stats in aggregated_stats:

dask/dataframe/dask_expr/io/tests/test_distributed.py

Lines changed: 1 addition & 1 deletion
@@ -6,7 +6,7 @@

 distributed = pytest.importorskip("distributed")

-from distributed.utils_test import *  # noqa F401, F403
+from distributed.utils_test import *  # noqa: F401, F403
 from distributed.utils_test import gen_cluster

 import dask.dataframe as dd
