From b67297d23813fa884ad27cb6553c8230e1d2d032 Mon Sep 17 00:00:00 2001
From: Sandeep Subramanian
Date: Tue, 2 May 2023 20:22:55 -0700
Subject: [PATCH] Fix fp16 (#6543)

Signed-off-by: MaximumEntropy
---
 examples/nlp/language_modeling/megatron_gpt_eval.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/examples/nlp/language_modeling/megatron_gpt_eval.py b/examples/nlp/language_modeling/megatron_gpt_eval.py
index d797937850e0..00b53a9f6f8f 100644
--- a/examples/nlp/language_modeling/megatron_gpt_eval.py
+++ b/examples/nlp/language_modeling/megatron_gpt_eval.py
@@ -196,6 +196,8 @@ def main(cfg) -> None:
             pretrained_cfg.activations_checkpoint_granularity = None
             pretrained_cfg.activations_checkpoint_method = None
             pretrained_cfg.precision = trainer.precision
+            if trainer.precision == "16":
+                pretrained_cfg.megatron_amp_O2 = False
         model = MegatronGPTModel.restore_from(
             restore_path=cfg.gpt_model_file,
             trainer=trainer,