From 261bf0c5ef0b922c7fb09bfb7ca3a23f645a832c Mon Sep 17 00:00:00 2001
From: Rahul A R
Date: Mon, 22 Aug 2022 10:24:48 -0400
Subject: [PATCH 1/3] fixed incorrect param to hasattr

---
 examples/pytorch/summarization/run_summarization_no_trainer.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/examples/pytorch/summarization/run_summarization_no_trainer.py b/examples/pytorch/summarization/run_summarization_no_trainer.py
index 96781b6dcadbdd..ad925327ea42ab 100644
--- a/examples/pytorch/summarization/run_summarization_no_trainer.py
+++ b/examples/pytorch/summarization/run_summarization_no_trainer.py
@@ -573,7 +573,7 @@ def postprocess_text(preds, labels):
     args.num_train_epochs = math.ceil(args.max_train_steps / num_update_steps_per_epoch)
 
     # Figure out how many steps we should save the Accelerator states
-    if hasattr(args.checkpointing_steps, "isdigit"):
+    if hasattr(args, "checkpointing_steps"):
         checkpointing_steps = args.checkpointing_steps
         if args.checkpointing_steps.isdigit():
             checkpointing_steps = int(args.checkpointing_steps)

From d648ecc2c443e2b5fde68e70d15b0e54bee27f43 Mon Sep 17 00:00:00 2001
From: Rahul A R
Date: Wed, 24 Aug 2022 11:25:43 -0400
Subject: [PATCH 2/3] simplified condition checks

---
 .../summarization/run_summarization_no_trainer.py | 9 +++------
 1 file changed, 3 insertions(+), 6 deletions(-)

diff --git a/examples/pytorch/summarization/run_summarization_no_trainer.py b/examples/pytorch/summarization/run_summarization_no_trainer.py
index ad925327ea42ab..4b5760a3cffced 100644
--- a/examples/pytorch/summarization/run_summarization_no_trainer.py
+++ b/examples/pytorch/summarization/run_summarization_no_trainer.py
@@ -573,12 +573,9 @@ def postprocess_text(preds, labels):
     args.num_train_epochs = math.ceil(args.max_train_steps / num_update_steps_per_epoch)
 
     # Figure out how many steps we should save the Accelerator states
-    if hasattr(args, "checkpointing_steps"):
-        checkpointing_steps = args.checkpointing_steps
-        if args.checkpointing_steps.isdigit():
-            checkpointing_steps = int(args.checkpointing_steps)
-    else:
-        checkpointing_steps = None
+    checkpointing_steps = args.checkpointing_steps
+    if checkpointing_steps is not None and checkpointing_steps.isdigit():
+            checkpointing_steps = int(checkpointing_steps)
 
     # We need to initialize the trackers we use, and also store our configuration.
     # We initialize the trackers only on main process because `accelerator.log`

From b8e2d96a34bd508651e4af764ef3510aea56da40 Mon Sep 17 00:00:00 2001
From: Rahul A R
Date: Wed, 24 Aug 2022 11:34:03 -0400
Subject: [PATCH 3/3] code cleanup

---
 examples/pytorch/summarization/run_summarization_no_trainer.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/examples/pytorch/summarization/run_summarization_no_trainer.py b/examples/pytorch/summarization/run_summarization_no_trainer.py
index 4b5760a3cffced..3a276ecca192ab 100644
--- a/examples/pytorch/summarization/run_summarization_no_trainer.py
+++ b/examples/pytorch/summarization/run_summarization_no_trainer.py
@@ -575,7 +575,7 @@ def postprocess_text(preds, labels):
     # Figure out how many steps we should save the Accelerator states
     checkpointing_steps = args.checkpointing_steps
     if checkpointing_steps is not None and checkpointing_steps.isdigit():
-            checkpointing_steps = int(checkpointing_steps)
+        checkpointing_steps = int(checkpointing_steps)
 
     # We need to initialize the trackers we use, and also store our configuration.
     # We initialize the trackers only on main process because `accelerator.log`
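
For reference, a minimal self-contained sketch of the checkpointing-steps handling as it stands after all three patches; the argparse.Namespace below is a stand-in for the example script's parsed arguments, not its real parser:

import argparse

# Stand-in for the script's parsed CLI arguments; --checkpointing_steps may be
# None, an integer string such as "500", or a non-numeric value like "epoch".
args = argparse.Namespace(checkpointing_steps="500")

# Figure out how many steps we should save the Accelerator states
checkpointing_steps = args.checkpointing_steps
if checkpointing_steps is not None and checkpointing_steps.isdigit():
    checkpointing_steps = int(checkpointing_steps)

print(checkpointing_steps)  # 500 as an int; None and non-digit strings pass through unchanged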