From 18d5b34b64a8b1c407f673cc2914ad9bfa4cb2d0 Mon Sep 17 00:00:00 2001
From: dimapihtar
Date: Thu, 1 Feb 2024 06:38:41 -0800
Subject: [PATCH] remove assertion

Signed-off-by: dimapihtar
---
 .../nlp/models/language_modeling/megatron_gpt_model.py | 5 -----
 1 file changed, 5 deletions(-)

diff --git a/nemo/collections/nlp/models/language_modeling/megatron_gpt_model.py b/nemo/collections/nlp/models/language_modeling/megatron_gpt_model.py
index c2b97caf12a3..65a8d66c245e 100644
--- a/nemo/collections/nlp/models/language_modeling/megatron_gpt_model.py
+++ b/nemo/collections/nlp/models/language_modeling/megatron_gpt_model.py
@@ -1305,11 +1305,6 @@ def setup(self, stage=None):
         self.init_global_step = self.trainer.global_step
 
         if self.rampup_batch_size:
-            optimizer = self.cfg.optim.get('name', None)
-            assert (
-                optimizer == 'fused_adam'
-            ), f'{optimizer} optimizer is not supported yet with rampup batch size. Please, use fused_adam optimizer instead.'
-
             num_microbatch_calculator = apex.transformer.pipeline_parallel.utils._GLOBAL_NUM_MICROBATCHES_CALCULATOR
             num_microbatch_calculator.update(self.init_consumed_samples, consistency_check=False)
             self.prev_consumed_samples = self.init_consumed_samples
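
Note (editorial, not part of the patch): with the assert gone, setup() no longer
restricts rampup_batch_size to the fused_adam optimizer; the batch-size ramp-up
itself is driven by Apex's global microbatch calculator, which is
optimizer-agnostic. Below is a minimal, hypothetical sketch of a config that
this change newly permits. The optimizer name and schedule values are
illustrative assumptions, not taken from the patch; only the keys
(optim.name, rampup_batch_size) mirror those read by megatron_gpt_model.py.

    # Hypothetical config fragment; names and values are assumptions for
    # illustration only. The keys mirror those the removed check consulted.
    from omegaconf import OmegaConf

    cfg = OmegaConf.create(
        {
            # Any optimizer name passes now; the deleted assert accepted
            # only 'fused_adam'.
            "optim": {"name": "distributed_fused_adam", "lr": 1e-4},
            "micro_batch_size": 4,
            "global_batch_size": 256,
            # [start_global_batch_size, batch_size_increment, rampup_samples]
            "rampup_batch_size": [32, 32, 1_000_000],
        }
    )

    optimizer = cfg.optim.get("name", None)  # the exact lookup the old assert used
    print(optimizer)  # distributed_fused_adam: would have raised AssertionError before

With the old check, this config failed in setup() before the microbatch
calculator was ever updated; after the patch it proceeds to
num_microbatch_calculator.update() regardless of the optimizer chosen.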