
Commit ee70af1

[CodeStyle] black -> ruff format migration - part 16 (#74670)
* [CodeStyle] `black -> ruff format` migration - part 16
* fix f-string
1 parent adfd2d7 commit ee70af1

40 files changed (+363, −271 lines)

python/paddle/incubate/fp8/deep_gemm/jit/compiler.py

Lines changed: 1 addition & 1 deletion
@@ -45,7 +45,7 @@ def get_jit_include_dir() -> str:
 @functools.cache
 def get_deep_gemm_version() -> str:
     # Update include directories
-    include_dir = f"{get_jit_include_dir()+'/../../../../include/paddle/fluid/fp8/deep_gemm/include'}"
+    include_dir = f"{get_jit_include_dir()}/../../../../include/paddle/fluid/fp8/deep_gemm/include"
     assert os.path.exists(
         include_dir
     ), f"Cannot find GEMM include directory {include_dir}"
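Note: this hunk is the "fix f-string" from the commit message. The old line wrapped the entire path concatenation in a single f-string placeholder, so the f-string did nothing but evaluate a plain string expression; the new line interpolates only the function call and leaves the literal suffix outside the braces. A minimal sketch of the difference (path shortened for illustration):

    base = "/opt/include"
    old = f"{base + '/deep_gemm'}"  # concatenation buried inside one placeholder
    new = f"{base}/deep_gemm"       # only the variable is interpolated
    assert old == new               # same string; the second form is clearer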

python/paddle/incubate/fp8/deep_gemm/jit/interleave_ffma.py

Lines changed: 4 additions & 3 deletions
@@ -104,9 +104,10 @@ def modify_segment(m, name, ffma_lines):
     for i in range(num_lines // 2):
         dst_reg = parse_registers(ffma_lines[i * 2])[-2]
         low_line, high_line = ffma_lines[i * 2], ffma_lines[i * 2 + 1]
-        low_hex, high_hex = extract_hex_from_line(
-            low_line
-        ), extract_hex_from_line(high_line)
+        low_hex, high_hex = (
+            extract_hex_from_line(low_line),
+            extract_hex_from_line(high_line),
+        )
         le_bytes.append(
             low_hex.to_bytes(8, "little") + high_hex.to_bytes(8, "little")
         )
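This is one of the recurring ruff-format rewrites in this commit: where black broke a two-target assignment inside the first call's argument list, ruff format parenthesizes the right-hand tuple and gives each element its own line with a trailing comma. The same change appears in gemm.py below. A small sketch under a hypothetical helper f standing in for extract_hex_from_line:

    def f(x):  # hypothetical stand-in, for illustration only
        return x

    # before: the line break lands inside the first call
    a, b = f(
        1
    ), f(2)

    # after: the tuple itself is parenthesized, one element per line
    a, b = (
        f(1),
        f(2),
    )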

python/paddle/incubate/fp8/deep_gemm/jit_kernels/gemm.py

Lines changed: 4 additions & 3 deletions
@@ -118,9 +118,10 @@ def get_best_configs(
     for block_m in block_ms:
         for block_n in block_ns:
             success = False
-            num_waves, best_num_waves = get_num_waves(
-                block_m, block_n
-            ), get_num_waves(best_block_m, best_block_n)
+            num_waves, best_num_waves = (
+                get_num_waves(block_m, block_n),
+                get_num_waves(best_block_m, best_block_n),
+            )
             if best_block_m is None or best_block_n is None:
                 success = True
             elif num_waves < best_num_waves:

python/paddle/incubate/nn/layer/fused_transformer.py

Lines changed: 21 additions & 24 deletions
@@ -147,10 +147,9 @@ def __init__(
         name: str | None = None,
     ) -> None:
         super().__init__()
-        assert embed_dim > 0, (
-            "Expected embed_dim to be greater than 0, "
-            f"but received {embed_dim}"
-        )
+        assert (
+            embed_dim > 0
+        ), f"Expected embed_dim to be greater than 0, but received {embed_dim}"
         self._dtype = self._helper.get_default_dtype()
         self._bias_attr = bias_attr
         self._weight_attr = weight_attr
@@ -338,13 +337,12 @@ def __init__(
     ) -> None:
         super().__init__()

-        assert embed_dim > 0, (
-            "Expected embed_dim to be greater than 0, "
-            f"but received {embed_dim}"
-        )
-        assert num_heads > 0, (
-            "Expected nhead to be greater than 0, " f"but received {num_heads}"
-        )
+        assert (
+            embed_dim > 0
+        ), f"Expected embed_dim to be greater than 0, but received {embed_dim}"
+        assert (
+            num_heads > 0
+        ), f"Expected nhead to be greater than 0, but received {num_heads}"

         self.normalize_before = normalize_before
         self._dtype = self._helper.get_default_dtype()
@@ -830,12 +828,12 @@ def __init__(
         self._config.pop("__class__", None)  # py3

         super().__init__()
-        assert d_model > 0, (
-            "Expected d_model to be greater than 0, " f"but received {d_model}"
-        )
-        assert nhead > 0, (
-            "Expected nhead to be greater than 0, " f"but received {nhead}"
-        )
+        assert (
+            d_model > 0
+        ), f"Expected d_model to be greater than 0, but received {d_model}"
+        assert (
+            nhead > 0
+        ), f"Expected nhead to be greater than 0, but received {nhead}"
         assert dim_feedforward > 0, (
             "Expected dim_feedforward to be greater than 0, "
             f"but received {dim_feedforward}"
@@ -1306,13 +1304,12 @@ def __init__(
     ) -> None:
         super().__init__()

-        assert embed_dim > 0, (
-            "Expected embed_dim to be greater than 0, "
-            f"but received {embed_dim}"
-        )
-        assert num_heads > 0, (
-            "Expected nhead to be greater than 0, " f"but received {num_heads}"
-        )
+        assert (
+            embed_dim > 0
+        ), f"Expected embed_dim to be greater than 0, but received {embed_dim}"
+        assert (
+            num_heads > 0
+        ), f"Expected nhead to be greater than 0, but received {num_heads}"
         assert (
             dim_feedforward > 0
         ), f"Expected dim_feedforward to be greater than 0, but received {dim_feedforward}"
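All four hunks above are the same mechanical change: black parenthesized the assert message and split it via implicit string concatenation, while ruff format parenthesizes the condition and keeps the message as a single f-string after the comma. Behavior is identical; only the wrapping moves. A runnable sketch (embed_dim chosen so the assert passes):

    embed_dim = 8

    # before (black): parentheses around a message built from adjacent literals
    assert embed_dim > 0, (
        "Expected embed_dim to be greater than 0, "
        f"but received {embed_dim}"
    )

    # after (ruff format): parentheses around the condition, one-line message
    assert (
        embed_dim > 0
    ), f"Expected embed_dim to be greater than 0, but received {embed_dim}"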

python/paddle/incubate/operators/graph_khop_sampler.py

Lines changed: 2 additions & 2 deletions
@@ -130,7 +130,7 @@ def graph_khop_sampler(
     if return_eids:
         if sorted_eids is None:
             raise ValueError(
-                "`sorted_eid` should not be None " "if return_eids is True."
+                "`sorted_eid` should not be None if return_eids is True."
             )
         (
             edge_src,
@@ -171,7 +171,7 @@ def graph_khop_sampler(
     if return_eids:
         if sorted_eids is None:
             raise ValueError(
-                "`sorted_eid` should not be None " "if return_eids is True."
+                "`sorted_eid` should not be None if return_eids is True."
             )
         check_variable_and_dtype(
             sorted_eids, "Eids", ("int32", "int64"), "graph_khop_sampler"

python/paddle/incubate/operators/graph_sample_neighbors.py

Lines changed: 1 addition & 2 deletions
@@ -157,8 +157,7 @@ def graph_sample_neighbors(
     if flag_perm_buffer:
         if perm_buffer is None:
             raise ValueError(
-                "`perm_buffer` should not be None if `flag_perm_buffer`"
-                "is True."
+                "`perm_buffer` should not be None if `flag_perm_buffer` is True."
             )

     if in_dynamic_or_pir_mode():
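Joining adjacent string literals here does more than tidy formatting: Python concatenates them at compile time with no separator, and the split literal in graph_sample_neighbors.py was missing the space at the seam, so the old message rendered as "...`flag_perm_buffer`is True." A quick demonstration:

    old = "`perm_buffer` should not be None if `flag_perm_buffer`" "is True."
    new = "`perm_buffer` should not be None if `flag_perm_buffer` is True."
    print(old)  # ...if `flag_perm_buffer`is True.   (missing space)
    print(new)  # ...if `flag_perm_buffer` is True.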

python/paddle/incubate/optimizer/pipeline.py

Lines changed: 9 additions & 11 deletions
@@ -481,10 +481,9 @@ def _get_op_device_attr(self, op):
             else None
         )
         if device:
-            assert device[0:3] == 'gpu', (
-                "Now, only gpu devices are "
-                "supported in pipeline parallelism."
-            )
+            assert (
+                device[0:3] == 'gpu'
+            ), "Now, only gpu devices are supported in pipeline parallelism."
         return device

     def _add_op_device_attr_for_op(self, op, idx, block):
@@ -669,17 +668,16 @@ def _check_validation(self, block):
             ), f"op ({op.type}) has no {self._op_device_key} attribute."

             device = op.attr(self._op_device_key)
-            assert device, (
-                "op_device attribute for op " f"{op.type} has not been set."
-            )
+            assert (
+                device
+            ), f"op_device attribute for op {op.type} has not been set."
             if device == f"{self._device}:all":
                 continue

             dev_type = device.split(':')[0]
-            assert dev_type == "gpu", (
-                "Now only gpu devices are supported "
-                "for pipeline parallelism."
-            )
+            assert (
+                dev_type == "gpu"
+            ), "Now only gpu devices are supported for pipeline parallelism."

             if device not in device_list:
                 device_list.append(device)

python/paddle/io/dataloader/dataloader_iter.py

Lines changed: 3 additions & 4 deletions
@@ -376,10 +376,9 @@ def __init__(self, loader):
         self._persistent_workers = loader._persistent_workers
         self._resume_worker_cnt = 0

-        assert self._num_workers > 0, (
-            "Multi-process DataLoader "
-            f"invalid num_workers({self._num_workers})"
-        )
+        assert (
+            self._num_workers > 0
+        ), f"Multi-process DataLoader invalid num_workers({self._num_workers})"

         # subprocess workers' result queue
         self._data_queue = None

python/paddle/io/dataloader/dataset.py

Lines changed: 15 additions & 10 deletions
@@ -87,14 +87,16 @@ def __init__(self) -> None:

     def __getitem__(self, idx: int) -> _T:
         raise NotImplementedError(
-            "'{}' not implement in class "
-            "{}".format('__getitem__', self.__class__.__name__)
+            "'{}' not implement in class {}".format(
+                '__getitem__', self.__class__.__name__
+            )
         )

     def __len__(self) -> int:
         raise NotImplementedError(
-            "'{}' not implement in class "
-            "{}".format('__len__', self.__class__.__name__)
+            "'{}' not implement in class {}".format(
+                '__len__', self.__class__.__name__
+            )
         )

     if TYPE_CHECKING:
@@ -268,20 +270,23 @@ def __init__(self) -> None:

     def __iter__(self) -> Iterator[_T]:
         raise NotImplementedError(
-            "'{}' not implement in class "
-            "{}".format('__iter__', self.__class__.__name__)
+            "'{}' not implement in class {}".format(
+                '__iter__', self.__class__.__name__
+            )
         )

     def __getitem__(self, idx: int) -> Never:
         raise RuntimeError(
-            "'{}' should not be called for IterableDataset"
-            "{}".format('__getitem__', self.__class__.__name__)
+            "'{}' should not be called for IterableDataset{}".format(
+                '__getitem__', self.__class__.__name__
+            )
         )

     def __len__(self) -> Never:
         raise RuntimeError(
-            "'{}' should not be called for IterableDataset"
-            "{}".format('__len__', self.__class__.__name__)
+            "'{}' should not be called for IterableDataset{}".format(
+                '__len__', self.__class__.__name__
+            )
         )
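Here the formatter folds the split literal into one string and breaks inside the .format() call instead. Worth noting: the RuntimeError messages still have no space before the class name, so they render as e.g. "...for IterableDatasetMyDataset"; the reformat is purely mechanical and preserves that preexisting quirk. A self-contained illustration (Demo is a hypothetical stand-in):

    class Demo:
        def __len__(self):
            raise RuntimeError(
                "'{}' should not be called for IterableDataset{}".format(
                    '__len__', self.__class__.__name__
                )
            )

    try:
        len(Demo())
    except RuntimeError as e:
        print(e)  # '__len__' should not be called for IterableDatasetDemo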

python/paddle/jit/dy2static/error.py

Lines changed: 1 addition & 1 deletion
@@ -211,7 +211,7 @@ def numpy_api_check(self, format_exception, error_line):
         func_str = None
         for frame in tb:
             searched_name = re.search(
-                fr'({RE_PYMODULE})*{frame.name}',
+                rf'({RE_PYMODULE})*{frame.name}',
                 error_line,
             )
             if searched_name:
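The fr and rf spellings are the same raw f-string; this hunk only normalizes the prefix order, so runtime behavior is unchanged. A sketch with a hypothetical RE_PYMODULE pattern (the real pattern lives in paddle's dy2static module and may differ):

    import re

    RE_PYMODULE = r'[a-zA-Z0-9_]+\.'  # hypothetical, for illustration only
    name = "mean"
    assert fr'({RE_PYMODULE})*{name}' == rf'({RE_PYMODULE})*{name}'
    m = re.search(rf'({RE_PYMODULE})*{name}', "numpy.mean(x)")
    print(m.group())  # numpy.mean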
