Apply isort and black reformatting
Signed-off-by: erhoo82 <erhoo82@users.noreply.github.com>
erhoo82 committed Feb 4, 2025
1 parent d5faad4 commit 49ff895
Showing 4 changed files with 17 additions and 6 deletions.
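All 17 additions and 6 deletions below are mechanical style fixes; no hunk touches imports, so the visible changes are black's line splitting and PEP 8 spacing. For reference, black's Python API reproduces the split in the first hunk. A minimal sketch, not part of the commit, assuming NeMo's black configuration (line length 119, string normalization disabled, since single quotes survive reformatting):

import black

# The over-long pre-commit line from the first hunk, dedented to module level.
SRC = (
    "is_pipeline_asymmetric = getattr(self, 'account_for_embedding_in_pipeline_split', False)"
    " or getattr(self, 'account_for_loss_in_pipeline_split', False)\n"
)

# Assumed settings; black splits at the last opening bracket, yielding the
# same three lines added in the diff below.
mode = black.Mode(line_length=119, string_normalization=False)
print(black.format_str(SRC, mode=mode))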
nemo/collections/llm/gpt/model/base.py (3 additions, 1 deletion)
@@ -193,7 +193,9 @@ def configure_model(self, tokenizer, pre_process=None, post_process=None) -> "MC
         )

         vp_size = self.virtual_pipeline_model_parallel_size
-        is_pipeline_asymmetric = getattr(self, 'account_for_embedding_in_pipeline_split', False) or getattr(self, 'account_for_loss_in_pipeline_split', False)
+        is_pipeline_asymmetric = getattr(self, 'account_for_embedding_in_pipeline_split', False) or getattr(
+            self, 'account_for_loss_in_pipeline_split', False
+        )
         if vp_size and not is_pipeline_asymmetric:
             p_size = self.pipeline_model_parallel_size
             assert (
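The guard itself deserves a gloss: either flag marks the pipeline split as asymmetric, with the embedding or loss layer counted as occupying a stage slot, and in that case the uniform chunks-per-stage assertion that follows is skipped. A toy illustration with hypothetical values (getattr's default covers objects created before these flags existed):

# Toy stand-in for the model object; attribute names match the diff, the
# flag value is hypothetical.
class ToyModule:
    account_for_embedding_in_pipeline_split = True  # embedding takes a stage slot


m = ToyModule()
is_pipeline_asymmetric = getattr(m, 'account_for_embedding_in_pipeline_split', False) or getattr(
    m, 'account_for_loss_in_pipeline_split', False
)
print(is_pipeline_asymmetric)  # True -> the divisibility assert is skipped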
@@ -182,7 +182,10 @@ def __init__(self, cfg: DictConfig, trainer: Trainer, no_lm_init=True):
             if vp_size == 1:
                 vp_size = None
             else:
-                if not(self.cfg.get('account_for_embedding_in_pipeline_split', False) and self.cfg.get('account_for_loss_in_pipeline_split', False)):
+                if not (
+                    self.cfg.get('account_for_embedding_in_pipeline_split', False)
+                    and self.cfg.get('account_for_loss_in_pipeline_split', False)
+                ):
                     assert (
                         self.cfg.num_layers // self.cfg.pipeline_model_parallel_size
                     ) % vp_size == 0, 'Make sure the number of model chunks is the same across all pipeline stages.'
@@ -1021,7 +1024,10 @@ def _validate_and_override_config(self):
             if vp_size == 1:
                 self.cfg['virtual_pipeline_model_parallel_size'] = None
             else:
-                if not(self.cfg.get('account_for_embedding_in_pipeline_split', False) and self.cfg.get('account_for_loss_in_pipeline_split', False)):
+                if not (
+                    self.cfg.get('account_for_embedding_in_pipeline_split', False)
+                    and self.cfg.get('account_for_loss_in_pipeline_split', False)
+                ):
                     assert (
                         self.cfg.num_layers // self.cfg.pipeline_model_parallel_size
                     ) % vp_size == 0, 'Make sure the number of model chunks is the same across all pipeline stages.'
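Both hunks above wrap the same assertion: with virtual (interleaved) pipelining and a symmetric split, each pipeline stage's share of layers must divide evenly into model chunks. Worked numbers, hypothetical and for illustration only:

num_layers = 48
pipeline_model_parallel_size = 4  # 48 // 4 = 12 layers per pipeline stage
vp_size = 3  # 12 % 3 == 0 -> three 4-layer chunks per stage, OK

assert (num_layers // pipeline_model_parallel_size) % vp_size == 0

# vp_size = 5 would trip the assert: 12 % 5 != 0, so the chunks could not be
# the same size across all pipeline stages.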
@@ -2192,7 +2192,10 @@ def build_transformer_config(self) -> TransformerConfig:
        For attributes in TransformerConfig that are not in the nemo model config, we add custom logic.
        """

-        if not(self.cfg.get('account_for_embedding_in_pipeline_split', False) and self.cfg.get('account_for_loss_in_pipeline_split', False)):
+        if not (
+            self.cfg.get('account_for_embedding_in_pipeline_split', False)
+            and self.cfg.get('account_for_loss_in_pipeline_split', False)
+        ):
             if self.cfg.num_layers % self.cfg.get('pipeline_model_parallel_size', 1) != 0:
                 raise ValueError(
                     f"num_layers ({self.cfg.num_layers}) should be divisible by "
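Same guard, stricter check: on the default path (both flags False) the raw layer count must divide evenly across pipeline stages; when the embedding and loss layers are counted toward the split, the effective per-stage layer count changes, so the plain rule is skipped. A standalone version of the default-path check, with hypothetical numbers chosen to fail:

num_layers = 46  # hypothetical; deliberately not divisible by 4
pipeline_model_parallel_size = 4

# Default path only (both account_for_* flags False); mirrors the diff above.
if num_layers % pipeline_model_parallel_size != 0:
    raise ValueError(
        f"num_layers ({num_layers}) should be divisible by "
        f"pipeline_model_parallel_size ({pipeline_model_parallel_size})"
    )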
nemo/lightning/pytorch/strategies/megatron_strategy.py (2 additions, 2 deletions)
@@ -262,8 +262,8 @@ def __init__(
         self.sequence_parallel = sequence_parallel
         self.encoder_tensor_model_parallel_size = encoder_tensor_model_parallel_size
         self.encoder_pipeline_model_parallel_size = encoder_pipeline_model_parallel_size
-        self.account_for_embedding_in_pipeline_split=account_for_embedding_in_pipeline_split
-        self.account_for_loss_in_pipeline_split=account_for_loss_in_pipeline_split
+        self.account_for_embedding_in_pipeline_split = account_for_embedding_in_pipeline_split
+        self.account_for_loss_in_pipeline_split = account_for_loss_in_pipeline_split
         self.lazy_init = lazy_init
         self.ckpt_load_optimizer = ckpt_load_optimizer
         self.ckpt_save_optimizer = ckpt_save_optimizer
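The only change in this file is PEP 8 spacing, which black enforces mechanically: spaces around = in an assignment statement, no spaces around = in a keyword argument. A minimal sketch with hypothetical names:

# Before black, the attribute assignments were written keyword-style:
#   self.account_for_loss_in_pipeline_split=account_for_loss_in_pipeline_split
# black adds the spaces because this is a statement-level assignment...
account_for_loss_in_pipeline_split = True

# ...whereas '=' in a keyword argument correctly stays unspaced:
print(account_for_loss_in_pipeline_split, sep=' ')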
