Dummy class for ModelParallelConfig #7254

Merged: 12 commits, Aug 17, 2023
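In every touched module, the change is the same: the `except (ImportError, ModuleNotFoundError)` branch of the guarded megatron.core import now binds `ModelParallelConfig` (or, in one model file, `TransformerConfig`) to NeMo's `ApexGuardDefaults` dummy class, so the name is defined even when megatron-core is not installed. A minimal sketch of the pattern, assuming only that `ApexGuardDefaults` is importable from NeMo's megatron utils; the surrounding lines are illustrative, not copied from any single file:

```python
# Sketch of the import guard this PR extends in each file.
from nemo.collections.nlp.modules.common.megatron.utils import ApexGuardDefaults

try:
    from megatron.core import ModelParallelConfig

    HAVE_MEGATRON_CORE = True

except (ImportError, ModuleNotFoundError):

    # New in this PR: alias the missing config class to the dummy, so
    # module-level references (type annotations, default arguments) do
    # not raise NameError when megatron-core is absent.
    ModelParallelConfig = ApexGuardDefaults

    HAVE_MEGATRON_CORE = False
```

The per-file hunks below apply this alias, plus the `ApexGuardDefaults` import where the file did not already have it.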
```diff
@@ -49,6 +49,8 @@
 except (ImportError, ModuleNotFoundError):
 
+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False
```
```diff
@@ -45,6 +45,8 @@
 except (ImportError, ModuleNotFoundError):
 
+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False
```
```diff
@@ -34,6 +34,7 @@
     clip_grad_norm_fp32,
 )
 from nemo.collections.nlp.modules.common.megatron.megatron_init import initialize_model_parallel_for_nemo
+from nemo.collections.nlp.modules.common.megatron.utils import ApexGuardDefaults
 from nemo.collections.nlp.modules.common.tokenizer_utils import get_nmt_tokenizer
 from nemo.collections.nlp.parts.nlp_overrides import NEMO_MEGATRON_MODEL_PARALLEL_APPSTATE_OVERRIDE, GradScaler
 from nemo.core.optim import MainParamsOptimizerWrapper, prepare_lr_scheduler
@@ -57,6 +58,8 @@
 except (ImportError, ModuleNotFoundError):
 
+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False
 
 __all__ = ["MegatronBaseModel"]
```
```diff
@@ -33,6 +33,7 @@
     VirtualPromptSource,
     VirtualPromptStyle,
 )
+from nemo.collections.nlp.modules.common.megatron.utils import ApexGuardDefaults
 from nemo.collections.nlp.modules.common.transformer.text_generation import TextGeneration
 from nemo.collections.nlp.parts.nlp_overrides import GradScaler
 from nemo.utils import AppState, logging
@@ -52,6 +53,8 @@
 except (ImportError, ModuleNotFoundError):
 
+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False
```
```diff
@@ -35,6 +35,7 @@
 from nemo.collections.nlp.modules.common.megatron.build_model import build_model
 from nemo.collections.nlp.modules.common.megatron.module import Float16Module
 from nemo.collections.nlp.modules.common.megatron.utils import (
+    ApexGuardDefaults,
     average_losses_across_data_parallel_group,
     get_all_params_for_weight_decay_optimization,
     get_ltor_masks_and_position_ids,
@@ -84,6 +85,8 @@
 except (ImportError, ModuleNotFoundError):
 
+    TransformerConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False
 
 try:
```
```diff
@@ -31,6 +31,7 @@
 from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
 from nemo.collections.nlp.modules.common import VirtualPromptPlaceholderToken, VirtualPromptSource, VirtualPromptStyle
 from nemo.collections.nlp.modules.common.megatron.utils import (
+    ApexGuardDefaults,
     average_losses_across_data_parallel_group,
     get_iterator_k_split,
 )
@@ -61,6 +62,8 @@
 except (ImportError, ModuleNotFoundError):
 
+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False
```
```diff
@@ -34,6 +34,7 @@
     MegatronTokenLevelEncoderDecoderModule,
 )
 from nemo.collections.nlp.modules.common.megatron.utils import (
+    ApexGuardDefaults,
     average_losses_across_data_parallel_group,
     get_params_for_weight_decay_optimization,
 )
@@ -67,6 +68,8 @@
 except (ImportError, ModuleNotFoundError):
 
+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False
 
 __all__ = ["MegatronLMEncoderDecoderModel"]
```
```diff
@@ -46,6 +46,8 @@
 except (ImportError, ModuleNotFoundError):
 
+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False
```
nemo/collections/nlp/modules/common/megatron/attention.py (2 additions, 0 deletions)

```diff
@@ -60,6 +60,8 @@
 except (ImportError, ModuleNotFoundError):
 
+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False
 
 try:
```
```diff
@@ -59,6 +59,8 @@
 except (ImportError, ModuleNotFoundError):
 
+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False
```
```diff
@@ -41,6 +41,8 @@
 except (ImportError, ModuleNotFoundError):
 
+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False
 
 __all__ = []
```
```diff
@@ -37,6 +37,8 @@
 except (ImportError, ModuleNotFoundError):
 
+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False
 
 __all__ = ["MegatronTransformerEncoderDecoderModule"]
```
```diff
@@ -42,6 +42,8 @@
 except (ImportError, ModuleNotFoundError):
 
+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False
 
 __all__ = []
```
```diff
@@ -43,6 +43,8 @@
 except (ImportError, ModuleNotFoundError):
 
+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False
```
```diff
@@ -43,6 +43,8 @@
 except (ImportError, ModuleNotFoundError):
 
+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False
```
```diff
@@ -42,6 +42,8 @@
 except (ImportError, ModuleNotFoundError):
 
+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False
 
 __all__ = ["MegatronTransformerEncoderModule"]
```
nemo/collections/nlp/modules/common/megatron/mlp.py (2 additions, 0 deletions)

```diff
@@ -51,6 +51,8 @@
 except (ImportError, ModuleNotFoundError):
 
+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False
```
nemo/collections/nlp/modules/common/megatron/module.py (2 additions, 0 deletions)

```diff
@@ -27,6 +27,8 @@
 except (ImportError, ModuleNotFoundError):
 
+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False
```
```diff
@@ -42,8 +42,11 @@
     from megatron.core import ModelParallelConfig, tensor_parallel
 
     HAVE_MEGATRON_CORE = True
 
 except (ImportError, ModuleNotFoundError):
 
+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False
```
```diff
@@ -40,6 +40,8 @@
 except (ImportError, ModuleNotFoundError):
 
+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False
 
 MIN_DIM_HEAD = 32
```
```diff
@@ -59,6 +59,8 @@
 except (ImportError, ModuleNotFoundError):
 
+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False
 
 __all__ = ["MegatronTokenLevelHead", "MegatronTokenLevelEncoderDecoderModule"]
```
nemo/collections/nlp/modules/common/megatron/transformer.py (2 additions, 0 deletions)

```diff
@@ -63,6 +63,8 @@
 except (ImportError, ModuleNotFoundError):
 
+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False
 
 try:
```
nemo/collections/nlp/modules/common/megatron/utils.py (2 additions, 0 deletions)

```diff
@@ -33,13 +33,15 @@
 HAVE_APEX = False
 
 try:
     from megatron.core import ModelParallelConfig, parallel_state, tensor_parallel
     from megatron.core.tensor_parallel.layers import linear_with_grad_accumulation_and_async_allreduce
 
     HAVE_MEGATRON_CORE = True
 
 except (ImportError, ModuleNotFoundError):
 
+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False
```

Check notice (Code scanning / CodeQL) on the megatron.core import: Import of 'ModelParallelConfig' is not used.
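`ApexGuardDefaults` is the dummy class all of the aliases point at. Roughly, it is a stub whose instances answer every attribute lookup with `None`; the sketch below is paraphrased from NeMo's megatron utils and may differ in details from the checked-in class:

```python
class ApexGuardDefaults(object):
    """Stand-in for symbols from apex/megatron-core when those packages
    are missing: attribute access on an instance yields None instead of
    raising AttributeError."""

    def __init__(self):
        super().__init__()

    def __getattr__(self, item):
        # Called only for attributes not found through normal lookup,
        # so every probe on an instance resolves to None.
        return None
```

The CodeQL notice above fits this picture: in utils.py the name `ModelParallelConfig` is bound in both branches of the try/except but never referenced again in the module, which is exactly the pattern the unused-import check flags.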
nemo/collections/nlp/modules/common/prompt_encoder.py (3 additions, 1 deletion)

```diff
@@ -20,7 +20,7 @@
 from torch import nn
 
 from nemo.collections.nlp.modules.common.megatron.fused_bias_gelu import fused_bias_gelu
-from nemo.collections.nlp.modules.common.megatron.utils import init_method_normal
+from nemo.collections.nlp.modules.common.megatron.utils import ApexGuardDefaults, init_method_normal
 from nemo.core.classes import Exportable, NeuralModule
 from nemo.core.classes.common import typecheck
 
@@ -31,6 +31,8 @@
 except (ImportError, ModuleNotFoundError):
 
+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False
```
nemo/collections/nlp/modules/common/text_generation_utils.py (3 additions, 1 deletion)

```diff
@@ -24,7 +24,7 @@
 import torch.nn.functional as F
 
 from nemo.collections.common.tokenizers.tabular_tokenizer import TabularTokenizer
-from nemo.collections.nlp.modules.common.megatron.utils import get_ltor_masks_and_position_ids
+from nemo.collections.nlp.modules.common.megatron.utils import ApexGuardDefaults, get_ltor_masks_and_position_ids
 from nemo.collections.nlp.modules.common.text_generation_strategy import model_inference_strategy_dispatcher
 from nemo.collections.nlp.modules.common.transformer.text_generation import LengthParam, OutputType, SamplingParam
 from nemo.utils import AppState
@@ -45,6 +45,8 @@
 except (ImportError, ModuleNotFoundError):
 
+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False
 
 __all__ = [
```
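The effect of the alias is easiest to see from the failure side. A hypothetical check, assuming NeMo is installed but megatron-core is not; the variable names and probed attributes are illustrative only:

```python
# With megatron-core missing, every guarded module ends up with
# ModelParallelConfig bound to the stub, so module import succeeds and
# attribute probes return None instead of raising.
from nemo.collections.nlp.modules.common.megatron.utils import ApexGuardDefaults

ModelParallelConfig = ApexGuardDefaults  # what each except branch now does

cfg = ModelParallelConfig()            # the stub instantiates cleanly
print(cfg.tensor_model_parallel_size)  # None, via the __getattr__ catch-all
print(cfg.any_other_field)             # None for any attribute name
```

Functions annotated with `config: ModelParallelConfig` therefore still import and define cleanly; code only fails later, if it actually needs a real megatron-core configuration, which appears to be the failure mode this PR is guarding against.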