Dummy class for ModelParallelConfig (#7254)
* guard ModelParallelConfig

Signed-off-by: eharper <eharper@nvidia.com>

* guard TransformerConfig

Signed-off-by: eharper <eharper@nvidia.com>

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* revert

Signed-off-by: eharper <eharper@nvidia.com>

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* import

Signed-off-by: eharper <eharper@nvidia.com>

* revert

Signed-off-by: eharper <eharper@nvidia.com>

* revert

Signed-off-by: eharper <eharper@nvidia.com>

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* import

Signed-off-by: eharper <eharper@nvidia.com>

---------

Signed-off-by: eharper <eharper@nvidia.com>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
ericharper and pre-commit-ci[bot] authored Aug 17, 2023
1 parent ad2e3f3 commit 8508e07
Showing 25 changed files with 56 additions and 5 deletions.
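
For context: every changed file applies the same import-guard pattern. When megatron-core is installed, ModelParallelConfig (or TransformerConfig) is imported normally; when it is not, the name is bound to NeMo's ApexGuardDefaults dummy class so the module still imports and annotations such as `config: ModelParallelConfig` still resolve. A minimal sketch of the pattern, using a simplified stand-in for ApexGuardDefaults (the real class lives in nemo/collections/nlp/modules/common/megatron/utils.py and may differ):

# Sketch only: simplified stand-in for NeMo's ApexGuardDefaults.
class ApexGuardDefaults:
    """Dummy substitute for symbols from an optional dependency."""

    def __init__(self):
        super().__init__()

    def __getattr__(self, item):
        # Attribute lookups on instances resolve to None instead of raising.
        return None


try:
    from megatron.core import ModelParallelConfig

    HAVE_MEGATRON_CORE = True

except (ImportError, ModuleNotFoundError):

    # megatron.core is unavailable: bind the name to the dummy class so the
    # module still imports; runtime code checks HAVE_MEGATRON_CORE instead.
    ModelParallelConfig = ApexGuardDefaults

    HAVE_MEGATRON_CORE = False

The guard defers the optional-dependency failure from import time to the point of use, which is what the per-file hunks below add.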

File (path not shown):
@@ -49,6 +49,8 @@

 except (ImportError, ModuleNotFoundError):

+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False

File (path not shown):
@@ -45,6 +45,8 @@

 except (ImportError, ModuleNotFoundError):

+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False

File (path not shown):
@@ -34,6 +34,7 @@
     clip_grad_norm_fp32,
 )
 from nemo.collections.nlp.modules.common.megatron.megatron_init import initialize_model_parallel_for_nemo
+from nemo.collections.nlp.modules.common.megatron.utils import ApexGuardDefaults
 from nemo.collections.nlp.modules.common.tokenizer_utils import get_nmt_tokenizer
 from nemo.collections.nlp.parts.nlp_overrides import NEMO_MEGATRON_MODEL_PARALLEL_APPSTATE_OVERRIDE, GradScaler
 from nemo.core.optim import MainParamsOptimizerWrapper, prepare_lr_scheduler
@@ -57,6 +58,8 @@

 except (ImportError, ModuleNotFoundError):

+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False

 __all__ = ["MegatronBaseModel"]

File (path not shown):
@@ -33,6 +33,7 @@
     VirtualPromptSource,
     VirtualPromptStyle,
 )
+from nemo.collections.nlp.modules.common.megatron.utils import ApexGuardDefaults
 from nemo.collections.nlp.modules.common.transformer.text_generation import TextGeneration
 from nemo.collections.nlp.parts.nlp_overrides import GradScaler
 from nemo.utils import AppState, logging
@@ -52,6 +53,8 @@

 except (ImportError, ModuleNotFoundError):

+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False

File (path not shown):
@@ -35,6 +35,7 @@
 from nemo.collections.nlp.modules.common.megatron.build_model import build_model
 from nemo.collections.nlp.modules.common.megatron.module import Float16Module
 from nemo.collections.nlp.modules.common.megatron.utils import (
+    ApexGuardDefaults,
     average_losses_across_data_parallel_group,
     get_all_params_for_weight_decay_optimization,
     get_ltor_masks_and_position_ids,
@@ -84,6 +85,8 @@

 except (ImportError, ModuleNotFoundError):

+    TransformerConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False

 try:

File (path not shown):
@@ -31,6 +31,7 @@
 from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
 from nemo.collections.nlp.modules.common import VirtualPromptPlaceholderToken, VirtualPromptSource, VirtualPromptStyle
 from nemo.collections.nlp.modules.common.megatron.utils import (
+    ApexGuardDefaults,
     average_losses_across_data_parallel_group,
     get_iterator_k_split,
 )
@@ -61,6 +62,8 @@

 except (ImportError, ModuleNotFoundError):

+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False

File (path not shown):
@@ -34,6 +34,7 @@
     MegatronTokenLevelEncoderDecoderModule,
 )
 from nemo.collections.nlp.modules.common.megatron.utils import (
+    ApexGuardDefaults,
     average_losses_across_data_parallel_group,
     get_params_for_weight_decay_optimization,
 )
@@ -59,7 +60,7 @@
 HAVE_APEX = False

 try:
-    from megatron.core import ModelParallelConfig, parallel_state, tensor_parallel
+    from megatron.core import parallel_state, tensor_parallel
     from megatron.core.enums import ModelType
     from megatron.core.pipeline_parallel.schedules import get_forward_backward_func

File (path not shown):
@@ -25,7 +25,7 @@
 from nemo.collections.common.parts.adapter_modules import AdapterModuleUtil
 from nemo.collections.common.parts.utils import activation_registry
 from nemo.collections.nlp.modules.common.megatron.fused_bias_gelu import fused_bias_gelu
-from nemo.collections.nlp.modules.common.megatron.utils import init_method_const, init_method_normal
+from nemo.collections.nlp.modules.common.megatron.utils import ApexGuardDefaults, init_method_const, init_method_normal
 from nemo.collections.nlp.modules.common.prompt_encoder import InferenceTable
 from nemo.core.classes.mixins import adapter_mixin_strategies

@@ -46,6 +46,8 @@

 except (ImportError, ModuleNotFoundError):

+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False

2 changes: 2 additions & 0 deletions nemo/collections/nlp/modules/common/megatron/attention.py
@@ -60,6 +60,8 @@

 except (ImportError, ModuleNotFoundError):

+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False

 try:

File (path not shown):
@@ -59,6 +59,8 @@

 except (ImportError, ModuleNotFoundError):

+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False

File (path not shown):
@@ -41,6 +41,8 @@

 except (ImportError, ModuleNotFoundError):

+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False

 __all__ = []

File (path not shown):
@@ -37,6 +37,8 @@

 except (ImportError, ModuleNotFoundError):

+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False

 __all__ = ["MegatronTransformerEncoderDecoderModule"]

File (path not shown):
@@ -42,6 +42,8 @@

 except (ImportError, ModuleNotFoundError):

+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False

 __all__ = []

File (path not shown):
@@ -43,6 +43,8 @@

 except (ImportError, ModuleNotFoundError):

+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False

File (path not shown):
@@ -43,6 +43,8 @@

 except (ImportError, ModuleNotFoundError):

+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False

File (path not shown):
@@ -42,6 +42,8 @@

 except (ImportError, ModuleNotFoundError):

+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False

 __all__ = ["MegatronTransformerEncoderModule"]

2 changes: 2 additions & 0 deletions nemo/collections/nlp/modules/common/megatron/mlp.py
@@ -51,6 +51,8 @@

 except (ImportError, ModuleNotFoundError):

+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False

3 changes: 3 additions & 0 deletions nemo/collections/nlp/modules/common/megatron/module.py
@@ -17,6 +17,7 @@
 import torch
 from torch.autograd import Variable
 from torch.nn.parameter import Parameter
+from nemo.collections.nlp.modules.common.megatron.utils import ApexGuardDefaults

 from nemo.utils import logging

@@ -27,6 +28,8 @@

 except (ImportError, ModuleNotFoundError):

+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False

File (path not shown):
@@ -42,8 +42,11 @@
     from megatron.core import ModelParallelConfig, tensor_parallel

     HAVE_MEGATRON_CORE = True
+
 except (ImportError, ModuleNotFoundError):

+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False

File (path not shown):
@@ -40,6 +40,8 @@

 except (ImportError, ModuleNotFoundError):

+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False

 MIN_DIM_HEAD = 32

File (path not shown):
@@ -59,6 +59,8 @@

 except (ImportError, ModuleNotFoundError):

+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False

 __all__ = ["MegatronTokenLevelHead", "MegatronTokenLevelEncoderDecoderModule"]

2 changes: 2 additions & 0 deletions nemo/collections/nlp/modules/common/megatron/transformer.py
@@ -63,6 +63,8 @@

 except (ImportError, ModuleNotFoundError):

+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False

 try:
2 changes: 1 addition & 1 deletion nemo/collections/nlp/modules/common/megatron/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@
HAVE_APEX = False

try:
from megatron.core import ModelParallelConfig, parallel_state, tensor_parallel
from megatron.core import parallel_state, tensor_parallel
from megatron.core.tensor_parallel.layers import linear_with_grad_accumulation_and_async_allreduce

HAVE_MEGATRON_CORE = True
Expand Down
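
Downstream modules are expected to gate runtime behavior on the HAVE_MEGATRON_CORE flag rather than on the possibly-dummy class itself. A hypothetical consumer, continuing the sketch above (the class name and error message are illustrative, not part of this commit):

class MegatronExampleModule:  # hypothetical name, for illustration only
    def __init__(self, config: ModelParallelConfig):
        if not HAVE_MEGATRON_CORE:
            # Fail loudly at construction time rather than at import time.
            raise ImportError("megatron-core is required for this module but was not found.")
        self.config = config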

4 changes: 3 additions & 1 deletion nemo/collections/nlp/modules/common/prompt_encoder.py
@@ -20,7 +20,7 @@
 from torch import nn

 from nemo.collections.nlp.modules.common.megatron.fused_bias_gelu import fused_bias_gelu
-from nemo.collections.nlp.modules.common.megatron.utils import init_method_normal
+from nemo.collections.nlp.modules.common.megatron.utils import ApexGuardDefaults, init_method_normal
 from nemo.core.classes import Exportable, NeuralModule
 from nemo.core.classes.common import typecheck

@@ -31,6 +31,8 @@

 except (ImportError, ModuleNotFoundError):

+    ModelParallelConfig = ApexGuardDefaults
+
     HAVE_MEGATRON_CORE = False

File (path not shown):
@@ -39,7 +39,7 @@
 HAVE_APEX = False

 try:
-    from megatron.core import ModelParallelConfig, parallel_state, tensor_parallel
+    from megatron.core import parallel_state, tensor_parallel

     HAVE_MEGATRON_CORE = True
