Add bf16-mixed and 16-mixed in module.py (NVIDIA#7227)
Signed-off-by: Abhishree <abhishreetm@gmail.com>
athitten authored Aug 15, 2023
1 parent 833cb56 commit 4a56ade
Showing 1 changed file with 2 additions and 2 deletions.
4 changes: 2 additions & 2 deletions nemo/collections/nlp/modules/common/megatron/module.py
@@ -263,13 +263,13 @@ def __init__(self, config: ModelParallelConfig, module, precision, share_token_embeddings
         super().__init__(config=config, share_token_embeddings=share_token_embeddings)
         self.precision = precision
 
-        if precision == 'bf16':
+        if precision in ['bf16', 'bf16-mixed']:
             self.add_module('module', module.bfloat16())
 
             def float16_converter(val):
                 return val.bfloat16()
 
-        elif int(precision) == 16:
+        elif precision in [16, '16', '16-mixed']:
             self.add_module('module', module.half())
 
             def float16_converter(val):
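Context for the change: PyTorch Lightning 2.x passes mixed-precision settings as the strings 'bf16-mixed' and '16-mixed', which the old checks did not accept. `precision == 'bf16'` fails for 'bf16-mixed', and that value then falls through to `int(precision)`, where `int('bf16-mixed')` raises a ValueError. Below is a minimal, self-contained sketch of the patched membership tests; `cast_module_for_precision` is a hypothetical helper for illustration, not the NeMo class itself.

import torch

def cast_module_for_precision(module: torch.nn.Module, precision) -> torch.nn.Module:
    # Hypothetical helper mirroring the patched branching logic above.
    if precision in ['bf16', 'bf16-mixed']:
        # bfloat16 path: the old check (precision == 'bf16') missed 'bf16-mixed',
        # which then hit int('bf16-mixed') in the elif and raised ValueError.
        return module.bfloat16()
    elif precision in [16, '16', '16-mixed']:
        # fp16 path: accepts the integer 16 as well as both string spellings.
        return module.half()
    return module

# Both the legacy flags and the Lightning-2.x spellings select the expected dtype.
assert cast_module_for_precision(torch.nn.Linear(4, 4), 'bf16-mixed').weight.dtype == torch.bfloat16
assert cast_module_for_precision(torch.nn.Linear(4, 4), '16-mixed').weight.dtype == torch.float16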
