[CodeStyle][Ruff][BUAA][F-[1-10]] Fix ruff FURB diagnostic for 10 files in `python/paddle/{amp, distributed, hapi, incubate}` (#67264)
Whsjrczr authored Aug 9, 2024
1 parent b6c6a4e commit fe31daf
Showing 10 changed files with 14 additions and 22 deletions.
2 changes: 1 addition & 1 deletion python/paddle/amp/accuracy_compare.py
@@ -696,5 +696,5 @@ def compare_accuracy(

print(f"-- Write to {output_filename}")

print("")
print()
excel_writer.close()
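
Note (not part of the commit): print("") and print() both emit a single newline, so the change above does not alter output. A minimal sketch:

    # Both calls write exactly one "\n" to stdout.
    print("")   # old spelling: prints an empty string, then the default newline
    print()     # new spelling: prints nothing, then the default newline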
@@ -590,11 +590,7 @@ def get_max_beta(self, ranks):
backward_order_beta = self.cluster.get_beta(
ranks[j], ranks[i]
)
- beta = (
-     forward_order_beta
-     if forward_order_beta > backward_order_beta
-     else backward_order_beta
- )
+ beta = max(backward_order_beta, forward_order_beta)
if max_beta is None:
max_beta = beta
else:
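Note (not part of the commit): the removed ternary simply picks the larger of two values, which is what the builtin max() does; argument order only affects which object is returned on a tie, not the value. A minimal sketch with made-up latencies:

    forward_order_beta, backward_order_beta = 3.2, 4.8  # hypothetical values

    old = (
        forward_order_beta
        if forward_order_beta > backward_order_beta
        else backward_order_beta
    )
    new = max(backward_order_beta, forward_order_beta)
    assert old == new == 4.8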
@@ -334,7 +334,7 @@ def parallelize(
# serialize the dist context by planner
if dist_context is not None:
logging.info("Start serialize searched dist attr")
- cwd = pathlib.Path().resolve()
+ cwd = pathlib.Path().cwd()
searched_dist_context_path = os.path.join(
cwd, f"searched_dist_context_{time.time()}.pkl"
)
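Note (not part of the commit): pathlib.Path().resolve() turns the relative path "." into an absolute path, while Path.cwd() (also callable from an instance, as written above) returns the working directory directly; for this purpose the two should produce the same path. A small sketch:

    import pathlib

    old_cwd = pathlib.Path().resolve()  # resolve "." against the working directory
    new_cwd = pathlib.Path.cwd()        # query the working directory directly
    assert old_cwd == new_cwd           # expected to hold under normal conditions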
@@ -819,7 +819,7 @@ def get_heights(suffixes, seq):
for i in range(1, len(seq)):
x = seq[suffixes[i - 1] :]
y = seq[suffixes[i] :]
- max_len = len(x) if len(x) > len(y) else len(y)
+ max_len = max(len(y), len(x))
same_count = 0
for j in range(max_len):
if j >= len(x) or j >= len(y):
@@ -2379,10 +2379,8 @@ def layer_placement_pass(self, stages, layers, device_meshes):
]
)
cost, _ = self._get_sub_program_cost(dist_context)
- max_stage_cost = (
-     min_max_stage_costs[s - 1][j]
-     if local_stage_cost < min_max_stage_costs[s - 1][j]
-     else local_stage_cost
+ max_stage_cost = max(
+     local_stage_cost, min_max_stage_costs[s - 1][j]
)

if cost <= min_cost:
@@ -2486,10 +2484,8 @@ def layer_placement_pass_new(self, stages, layers, device_meshes):
cost_strategies[s][i][j] = cost
memory_strategies[s][i][j] = memory

- max_stage_cost = (
-     min_max_stage_costs[s - 1][j]
-     if local_stage_cost < min_max_stage_costs[s - 1][j]
-     else local_stage_cost
+ max_stage_cost = max(
+     local_stage_cost, min_max_stage_costs[s - 1][j]
)
if memory > sum(max_mem):
cost = sys.maxsize
2 changes: 1 addition & 1 deletion python/paddle/distributed/fleet/elastic/manager.py
@@ -387,7 +387,7 @@ def _parse_np(self, np: str):
min_np = int(np_dict[0])
max_np = int(np_dict[1])
min_np = 1 if min_np <= 0 else min_np
- max_np = min_np if min_np > max_np else max_np
+ max_np = max(max_np, min_np)
else:
raise ValueError(
f'the np={np} needs to be in "MIN" or "MIN:MAX" format'
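Note (not part of the commit): a condensed, hypothetical sketch of the "MIN:MAX" clamping touched above, showing that max(max_np, min_np) reproduces the old min_np if min_np > max_np else max_np ternary:

    def parse_np(np_str: str) -> tuple[int, int]:
        # Hypothetical stand-in for _parse_np, handling only the "MIN:MAX" form.
        lo, hi = (int(x) for x in np_str.split(":"))
        lo = 1 if lo <= 0 else lo   # at least one trainer
        hi = max(hi, lo)            # never let MAX fall below MIN
        return lo, hi

    assert parse_np("0:4") == (1, 4)
    assert parse_np("8:4") == (8, 8)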
2 changes: 1 addition & 1 deletion python/paddle/distributed/fleet/launch.py
@@ -352,7 +352,7 @@ def get_cluster_info(args):
os.environ["PADDLE_ENABLE_ELASTIC"] = str(
enable_elastic(args, device_mode)
)
- cwd = pathlib.Path().resolve()
+ cwd = pathlib.Path().cwd()
rank_mapping_path = os.path.join(
cwd, "auto_parallel_rank_mapping.json"
)
2 changes: 1 addition & 1 deletion python/paddle/hapi/progressbar.py
@@ -38,7 +38,7 @@ def __init__(
if isinstance(num, int) and num <= 0:
raise TypeError('num should be None or integer (> 0)')
max_width = self._get_max_width()
- self._width = width if width <= max_width else max_width
+ self._width = min(width, max_width)
self._total_width = 0
self._verbose = verbose
self.file = file
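Note (not part of the commit): the progress-bar change is the min() counterpart of the max() rewrites above, capping a requested width at a computed maximum. A tiny sketch:

    max_width = 80  # hypothetical terminal limit
    for width in (40, 200):
        assert (width if width <= max_width else max_width) == min(width, max_width)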
2 changes: 1 addition & 1 deletion python/paddle/incubate/autograd/generate_op_map.py
@@ -61,7 +61,7 @@ def generate_code(
dct = {}
map_dct = {}
for op_path in [ops_yaml_path, ops_legacy_yaml_path]:
- pattern = re.compile(r'[(](.*)[)]', re.S)
+ pattern = re.compile(r'[(](.*)[)]', re.DOTALL)
with open(op_path, "rt") as f:
ops = yaml.safe_load(f)
for item in ops:
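Note (not part of the commit): re.S is just the short alias of re.DOTALL (the flag that lets "." match newlines), so the change above only spells the flag out. A minimal sketch:

    import re

    assert re.S is re.DOTALL  # same flag, different spelling

    pattern = re.compile(r'[(](.*)[)]', re.DOTALL)
    match = pattern.search("some_op(arg1,\n        arg2)")
    assert match is not None and "arg2" in match.group(1)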
@@ -167,7 +167,7 @@ def _save_param_attr(state_dict_, path, dims_mapping_dict=None):

# Why condition 'pp_rank < 0' exists?
# Because if pp_degree = 1, pp_rank is set -1
- pp_rank = 0 if pp_rank <= 0 else pp_rank
+ pp_rank = max(0, pp_rank)

if dist.get_world_size() > 1:
process_group = _get_all_ranks_of_pp(
2 changes: 1 addition & 1 deletion python/paddle/incubate/jit/inference_decorator.py
@@ -164,7 +164,7 @@ def __init__(self, func, used_as_at_decorator, **kwargs):
# get old d2s shapes!
if os.path.exists(d2s_input_info_path) and self.cache_static_model:
with open(d2s_input_info_path, "r") as f:
- for line in f.readlines():
+ for line in f:
line = line.strip()
name_shape = line.split(":")
assert len(name_shape) == 2
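Note (not part of the commit): iterating a file object directly streams lines one at a time instead of first building the full readlines() list; the loop body sees the same lines either way. A minimal sketch with a hypothetical cache file:

    # Hypothetical cache file in the "name:shape" format parsed above.
    with open("d2s_input_info.txt", "w") as f:
        f.write("x:[1, 3, 224, 224]\ny:[1]\n")

    shapes = {}
    with open("d2s_input_info.txt", "r") as f:
        for line in f:  # lazily yields each line, no intermediate list
            name, shape = line.strip().split(":")
            shapes[name] = shape

    assert shapes == {"x": "[1, 3, 224, 224]", "y": "[1]"}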
