Resolve dtype with utils_funcs.py
Signed-off-by: Jan Lasek <janek.lasek@gmail.com>
janekl committed Dec 6, 2023
1 parent 52d50e9 commit e4c89ef
Showing 2 changed files with 9 additions and 10 deletions.
8 changes: 7 additions & 1 deletion nemo/collections/nlp/parts/utils_funcs.py
@@ -12,7 +12,13 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-__all__ = ['list2str', 'tensor2list', 'plot_confusion_matrix', 'get_classification_report']
+__all__ = [
+    'torch_dtype_from_precision',
+    'list2str',
+    'tensor2list',
+    'plot_confusion_matrix',
+    'get_classification_report',
+]

 import os
 import time
11 changes: 2 additions & 9 deletions scripts/nlp_language_modeling/convert_hf_llama_to_nemo.py
@@ -41,6 +41,7 @@
     NLPSaveRestoreConnector,
     PipelineMixedPrecisionPlugin,
 )
+from nemo.collections.nlp.parts.utils_funcs import torch_dtype_from_precision
 from nemo.utils import logging


@@ -170,15 +171,6 @@ def convert(args):
     else:
         plugins.append(PipelineMixedPrecisionPlugin(precision=plugin_precision, device='cuda', scaler=scaler))

-    if precision == 32:
-        dtype = torch.float32
-    elif precision in [16, "16", "16-mixed"]:
-        dtype = torch.float16
-    elif precision in ["bf16", "bf16-mixed"]:
-        dtype = torch.bfloat16
-    else:
-        dtype = torch.float32  # fallback
-
     nemo_config.precision = precision
     print(f"nemo_config: {nemo_config}")

@@ -315,6 +307,7 @@ def convert(args):
     model._save_restore_connector = NLPSaveRestoreConnector()

     # cast to target precision and disable cpu init
+    dtype = torch_dtype_from_precision(precision)
     model = model.to(dtype=dtype)
     model.cfg.use_cpu_initialization = False

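Note: the converter now delegates the precision-to-dtype mapping to torch_dtype_from_precision, exported from nemo/collections/nlp/parts/utils_funcs.py. The helper's body is not shown in this diff; below is a minimal sketch of what it presumably covers, mirroring the inline logic deleted above. The actual signature and edge-case handling in utils_funcs.py may differ.

import torch

def torch_dtype_from_precision(precision) -> torch.dtype:
    # Hypothetical sketch, not the exact NeMo implementation: map a
    # Lightning-style precision flag to the corresponding torch dtype.
    if precision in [16, "16", "16-mixed"]:
        return torch.float16
    if precision in ["bf16", "bf16-mixed"]:
        return torch.bfloat16
    # 32 / "32" and anything unrecognized fall back to full precision,
    # mirroring the inline fallback removed from convert_hf_llama_to_nemo.py.
    return torch.float32

Centralizing this mapping in utils_funcs.py keeps the converter script and other callers from drifting out of sync as new precision flags appear.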
