[CodeStyle][Ruff][BUAA][D-[1-6]] Fix ruff RUF015 diagnostic for 6 files in `python/paddle/` (#67225)
MufanColin authored Aug 9, 2024
1 parent 8ce0089 commit 7eb71dd
Showing 6 changed files with 12 additions and 12 deletions.
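
For context: Ruff's RUF015 (unnecessary-iterable-allocation-for-first-element) flags code that materializes an entire iterable into a list only to take its first element. The fix applied throughout this commit swaps `list(x)[0]` for `next(iter(x))`, which pulls a single element without building the intermediate list. A minimal sketch with a throwaway dict (the name is illustrative, not from the diff):

    dispatch = {"matmul": "matmul_kernel"}  # stand-in single-entry dict

    # Before: builds a list of every key, then indexes element 0 -- O(n).
    first_key = list(dispatch.keys())[0]

    # After: takes one step on the key iterator -- O(1), no list allocated.
    first_key = next(iter(dispatch.keys()))
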
2 changes: 1 addition & 1 deletion paddle/fluid/pir/dialect/op_generator/api_gen.py
@@ -906,7 +906,7 @@ def _gen_one_impl(
         )
 
         kernel_name = (
-            list(dispatch_kernel.keys())[0]
+            next(iter(dispatch_kernel.keys()))
             if dispatch_kernel and len(dispatch_kernel.keys()) == 1
             else op_name
         )
2 changes: 1 addition & 1 deletion python/paddle/base/dygraph/tensor_patch_methods.py
@@ -763,7 +763,7 @@ def get_device_dtype_from_tensor(other):
         if len(invalid_keys) != 0:
             raise TypeError(
                 "to() got an unexpected keyword argument "
-                + list(invalid_keys)[0]
+                + next(iter(invalid_keys))
             )
         if size_args > 0:
             if isinstance(args[0], paddle.Tensor):
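
One behavioral difference is worth noting: on an empty iterable the two spellings fail with different exceptions. The change above is safe only because it sits behind the `len(invalid_keys) != 0` guard; a sketch of the difference:

    empty = set()

    try:
        list(empty)[0]            # old spelling
    except IndexError:
        pass                      # indexing an empty list -> IndexError

    try:
        next(iter(empty))         # new spelling
    except StopIteration:
        pass                      # exhausted iterator -> StopIteration

    # Passing a default to next() avoids the exception entirely:
    assert next(iter(empty), "<none>") == "<none>"
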
10 changes: 5 additions & 5 deletions python/paddle/base/dygraph/tracer.py
@@ -174,7 +174,7 @@ def eager_legacy_trace_op(
 
     if op_type == 'load_combine':
         assert len(outputs.keys()) == 1
-        key = list(outputs.keys())[0]
+        key = next(iter(outputs.keys()))
         for j in range(len(returns)):
             returns[j]._share_underline_tensor_to(outputs[key][j])
         return
@@ -200,12 +200,12 @@ def eager_legacy_trace_op(
             )
     elif isinstance(returns, list):
         assert len(outputs.keys()) == 1
-        key = list(outputs.keys())[0]
+        key = next(iter(outputs.keys()))
         for j in range(len(returns)):
             outputs[key][j].reconstruct_from_(returns[j], False)
     else:
         assert len(outputs.keys()) == 1
-        key = list(outputs.keys())[0]
+        key = next(iter(outputs.keys()))
         if isinstance(outputs[key], list):
             outputs[key][0].reconstruct_from_(returns, False)
         else:
@@ -285,12 +285,12 @@ def eager_trace_op(
             outputs[retname][0].reconstruct_from_(returns[i], False)
     elif isinstance(returns, list):
         assert len(outputs.keys()) == 1
-        key = list(outputs.keys())[0]
+        key = next(iter(outputs.keys()))
         for j in range(len(returns)):
             outputs[key][j].reconstruct_from_(returns[j], False)
     else:
         assert len(outputs.keys()) == 1
-        key = list(outputs.keys())[0]
+        key = next(iter(outputs.keys()))
         if isinstance(outputs[key], list):
             outputs[key][0].reconstruct_from_(returns, False)
         else:
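
The same assert-then-take-the-only-key sequence appears five times in tracer.py. A conceivable follow-up (not part of this commit) would hoist it into a helper; a sketch with a hypothetical name:

    def sole_key(d):
        # Hypothetical helper, not in the Paddle codebase: return the only
        # key of a single-entry mapping. Iterating a dict yields its keys
        # directly, so .keys() is optional.
        assert len(d) == 1
        return next(iter(d))

    outputs = {"Out": ["t0", "t1"]}
    assert sole_key(outputs) == "Out"
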
4 changes: 2 additions & 2 deletions python/paddle/base/framework.py
@@ -3367,8 +3367,8 @@ def find_name(var_list, name):
         if type in special_op_attrs:
             attrs = special_op_attrs.get(type, [])
             for attr in attrs:
-                a_name = list(attr.keys())[0]
-                default_value = list(attr.values())[0]
+                a_name = next(iter(attr.keys()))
+                default_value = next(iter(attr.values()))
                 if (
                     a_name in op_attrs.keys()
                     and default_value != op_attrs[a_name]
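
Since each `attr` dict here holds exactly one entry, the key and its default value could also be read in a single pass via `items()`. An equivalent alternative, not what the commit does:

    attr = {"use_mkldnn": False}  # illustrative single-entry attr dict

    # The commit's form: two separate one-step iterations.
    a_name = next(iter(attr.keys()))
    default_value = next(iter(attr.values()))

    # Equivalent single-pass form:
    a_name, default_value = next(iter(attr.items()))
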
2 changes: 1 addition & 1 deletion python/paddle/distributed/auto_parallel/static/reshard.py
@@ -1511,7 +1511,7 @@ def find_op_desc_seq(
             if is_union_process_mesh_tensor:
                 assert (
                     len(set(source_dims_mapping)) == 1
-                    and list(set(source_dims_mapping))[0] == -1
+                    and next(iter(set(source_dims_mapping))) == -1
                 )
                 if set(target_process_group).intersection(
                     set(source_process_group)
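
The rewritten assertion still constructs `set(source_dims_mapping)` twice; asserting that every dimension mapping is -1 can also be written as one set comparison. An equivalent sketch, not what the commit does:

    source_dims_mapping = [-1, -1, -1]

    # Commit's form: one distinct value, and that value is -1.
    assert (
        len(set(source_dims_mapping)) == 1
        and next(iter(set(source_dims_mapping))) == -1
    )

    # Equivalent: the set of values is exactly {-1}.
    assert set(source_dims_mapping) == {-1}
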
4 changes: 2 additions & 2 deletions (file name missing)
@@ -273,11 +273,11 @@ def _init_communication_group(self):
         # Create mp rings
         if self.num_mp > 1:
             mp_endpoints = [self.endpoints[mp_idx] for mp_idx in self.mp_group]
-            mp_rank = [
+            mp_rank = next(
                 idx
                 for idx, mp_idx in enumerate(self.mp_group)
                 if mp_idx == self.rank
-            ][0]
+            )
             collective_helper._init_communicator(
                 self._startup_program,
                 self.current_endpoint,
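
This last fix goes a step further than the others: the list comprehension becomes a generator expression, so the scan stops at the first matching index instead of walking the whole group, and a miss raises StopIteration rather than IndexError. A sketch with illustrative values:

    mp_group = [0, 2, 4, 6]
    rank = 4

    # Short-circuits at the first match; StopIteration if rank is absent.
    mp_rank = next(
        idx for idx, mp_idx in enumerate(mp_group) if mp_idx == rank
    )
    assert mp_rank == 2

    # With a default (note the extra parentheses), a miss returns None:
    assert next(
        (idx for idx, mp_idx in enumerate(mp_group) if mp_idx == 7), None
    ) is None
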
