From 8b18cfca7cdbdb1d7695e3ba7d73a172000d4b34 Mon Sep 17 00:00:00 2001
From: Dmytro Pykhtar <37850217+dimapihtar@users.noreply.github.com>
Date: Fri, 2 Feb 2024 23:10:14 +0200
Subject: [PATCH] remove assertion (#8302)

Signed-off-by: dimapihtar
---
 .../nlp/models/language_modeling/megatron_gpt_model.py | 5 -----
 1 file changed, 5 deletions(-)

diff --git a/nemo/collections/nlp/models/language_modeling/megatron_gpt_model.py b/nemo/collections/nlp/models/language_modeling/megatron_gpt_model.py
index ecb5db742785..3f09776a6d78 100644
--- a/nemo/collections/nlp/models/language_modeling/megatron_gpt_model.py
+++ b/nemo/collections/nlp/models/language_modeling/megatron_gpt_model.py
@@ -1304,11 +1304,6 @@ def setup(self, stage=None):
         self.init_global_step = self.trainer.global_step
 
         if self.rampup_batch_size:
-            optimizer = self.cfg.optim.get('name', None)
-            assert (
-                optimizer == 'fused_adam'
-            ), f'{optimizer} optimizer is not supported yet with rampup batch size. Please, use fused_adam optimizer instead.'
-
             num_microbatch_calculator = apex.transformer.pipeline_parallel.utils._GLOBAL_NUM_MICROBATCHES_CALCULATOR
             num_microbatch_calculator.update(self.init_consumed_samples, consistency_check=False)
             self.prev_consumed_samples = self.init_consumed_samples