[CodeStyle][C404] Unnecessary list comprehension (rewrite as a dict comprehension) (#51969)
enkilee authored Mar 23, 2023
1 parent 9796980 commit 1f8e6ad
Showing 2 changed files with 2 additions and 4 deletions.
2 changes: 1 addition & 1 deletion python/paddle/distributed/passes/auto_parallel_sharding.py
@@ -1790,7 +1790,7 @@ def group_param(sharding_info, fuse_size):
 class ShardingInfo:
     def __init__(self, group, rank, params_grads, partition_algor):
         self.group = group
-        self.params_grads = dict([(p.name, (p, g)) for p, g in params_grads])
+        self.params_grads = {p.name: (p, g) for p, g in params_grads}
         assert len(self.params_grads) == len(
             set(self.params_grads)
         ), "found duplicated param in params_grads"
@@ -231,9 +231,7 @@ def gen_random_grad_tensor(grad):

 def reader():
     for _ in range(6):
-        yield dict(
-            [(grad.name, gen_random_grad_tensor(grad)) for grad in grads]
-        )
+        yield {grad.name: gen_random_grad_tensor(grad) for grad in grads}

 scope = paddle.static.Scope()
 fetch_list = params
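Both hunks apply the same C404 rewrite: a dict built from a throwaway list of (key, value) tuples becomes a dict comprehension, which avoids materializing the intermediate list. A minimal standalone sketch of the equivalence, using illustrative names that are not part of the commit:

# Illustrative example of the C404 pattern; `pairs` is a hypothetical input.
pairs = [("a", 1), ("b", 2)]

# Before: dict() wrapping an unnecessary list comprehension (what C404 flags).
before = dict([(k, v * 10) for k, v in pairs])

# After: equivalent dict comprehension, no intermediate list is built.
after = {k: v * 10 for k, v in pairs}

assert before == after == {"a": 10, "b": 20}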
