Remove Apex dependency if not using norm (NVIDIA#10468)
Signed-off-by: Chen Cui <chcui@nvidia.com>
Co-authored-by: Pablo Garay <palenq@gmail.com>
Signed-off-by: Lifu Zhang <tomzhanglf@gmail.com>
2 people authored and tomlifu committed Oct 25, 2024
1 parent 4c29d37 commit ed91dda
Showing 1 changed file with 1 addition and 3 deletions.
@@ -157,9 +157,6 @@ def __init__(
         **kwargs,
     ):
         super().__init__()
-        if not HAVE_APEX:
-            logging.info("Apex is required to use ParallelLinearAdapters.")
-            raise RuntimeError("ParallelLinearAdapter can not run without Apex.")
         if not HAVE_MEGATRON_CORE:
             logging.info("Megatron-core is required to use ParallelLinearAdapters.")
             raise RuntimeError("ParallelLinearAdapter can not run without Megatron-core.")
@@ -227,6 +224,7 @@ def __init__(
         if self.norm_position in ["pre", "post"]:
             ln_features = in_features if self.norm_position == "pre" else out_features
             if norm_type == 'mixedfusedlayernorm':
+                assert HAVE_APEX, "Apex is required to use MixedFusedLayerNorm"
                 self.layer_norm = MixedFusedLayerNorm(ln_features, 1e-5, sequence_parallel_enbaled=False)
             elif norm_type == 'layernorm':
                 self.layer_norm = nn.LayerNorm(ln_features)
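
The commit moves the hard Apex requirement out of the adapter constructor and into the one branch that actually needs Apex (the 'mixedfusedlayernorm' path). Below is a minimal, self-contained sketch of that optional-dependency pattern. It assumes the usual try/except import guard that sets a HAVE_APEX flag and that Apex exposes MixedFusedLayerNorm under apex.normalization.fused_layer_norm; build_layer_norm is a hypothetical helper written for illustration, not the actual ParallelLinearAdapter code.

import torch.nn as nn

# Guarded optional import: the flag records whether Apex is available,
# without failing at import time for users who do not need it.
try:
    from apex.normalization.fused_layer_norm import MixedFusedLayerNorm

    HAVE_APEX = True
except (ImportError, ModuleNotFoundError):
    HAVE_APEX = False


def build_layer_norm(ln_features: int, norm_type: str) -> nn.Module:
    # Hypothetical helper for illustration; in the commit the check sits
    # inline in ParallelLinearAdapter.__init__ as shown in the diff above.
    if norm_type == 'mixedfusedlayernorm':
        # Apex is only required when the Apex-backed norm is selected.
        assert HAVE_APEX, "Apex is required to use MixedFusedLayerNorm"
        return MixedFusedLayerNorm(ln_features, 1e-5)
    elif norm_type == 'layernorm':
        return nn.LayerNorm(ln_features)
    raise ValueError(f"Unknown norm_type: {norm_type}")

With this arrangement, constructing an adapter with norm_type='layernorm' (or no norm) works on installations without Apex, and the assertion only fires when the Apex-specific fused norm is requested.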
