.nemo conversion bug fix (#10598)
* bug fix

Signed-off-by: dimapihtar <dpihtar@gmail.com>

* bug fix

Signed-off-by: dimapihtar <dpihtar@gmail.com>

* Apply isort and black reformatting

Signed-off-by: dimapihtar <dimapihtar@users.noreply.github.com>

---------

Signed-off-by: dimapihtar <dpihtar@gmail.com>
Signed-off-by: dimapihtar <dimapihtar@users.noreply.github.com>
Co-authored-by: dimapihtar <dimapihtar@users.noreply.github.com>
2 people authored and monica-sekoyan committed Oct 11, 2024
1 parent d01c785 commit 2109ca8
Showing 1 changed file with 13 additions and 7 deletions.
20 changes: 13 additions & 7 deletions nemo/collections/nlp/parts/nlp_overrides.py
@@ -1068,16 +1068,19 @@ def dummy():
                 torch.distributed.barrier()

             # create nemo file from folder with all mp_ranks checkpoints
-            if (
-                app_state.pipeline_model_parallel_rank == 0
-                and app_state.tensor_model_parallel_rank == 0
-                and app_state.data_parallel_rank == 0
-            ):
-                with tempfile.TemporaryDirectory() as tmpdir:
-                    if dist_ckpt:
+            if dist_ckpt:
+                should_move_data = is_global_rank_zero()
+            else:
+                should_move_data = (
+                    app_state.pipeline_model_parallel_rank == 0
+                    and app_state.tensor_model_parallel_rank == 0
+                    and app_state.data_parallel_rank == 0
+                )
+
+            if should_move_data:
+                with tempfile.TemporaryDirectory() as tmpdir:
+                    if dist_ckpt:
                         shutil.move(str(dist_ckpt_dir), tmpdir)
                     elif app_state.pipeline_model_parallel_size == 1:
                         # move weights to the tmpdir
                         for tp_rank in range(app_state.tensor_model_parallel_size):
@@ -1123,6 +1126,9 @@ def dummy():
                         for file in os.listdir(tmpdir):
                             shutil.move(os.path.join(tmpdir, file), folder_path)

+            if torch.distributed.is_initialized():
+                torch.distributed.barrier()
+
         else:
             return super().save_to(model, save_path)
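Reading from the diff alone: a distributed checkpoint (dist_ckpt) is a single directory shared by the whole job, so exactly one process may move it into the .nemo archive. The patch selects that process with is_global_rank_zero() instead of the per-dimension rank test, which apparently was not guaranteed to single out one rank for distributed checkpoints, and the second hunk adds a closing barrier so the other ranks wait for the move to finish. Below is a minimal sketch of that pattern, not NeMo's actual implementation: AppState is reduced to a stand-in dataclass, is_global_rank_zero is reimplemented for the example, and pack_checkpoint is a hypothetical name — only the control flow mirrors the patched save_to.

# Illustrative sketch only. In NeMo, AppState and is_global_rank_zero
# come from nemo.utils; here they are simplified stand-ins.
import os
import shutil
import tempfile
from dataclasses import dataclass

import torch


@dataclass
class AppState:
    # Stand-in for NeMo's AppState; only the fields used below.
    pipeline_model_parallel_rank: int = 0
    tensor_model_parallel_rank: int = 0
    data_parallel_rank: int = 0


def is_global_rank_zero() -> bool:
    # Stand-in: true on exactly one process across the whole job.
    if torch.distributed.is_available() and torch.distributed.is_initialized():
        return torch.distributed.get_rank() == 0
    return int(os.environ.get("RANK", "0")) == 0


def pack_checkpoint(app_state: AppState, dist_ckpt: bool, dist_ckpt_dir: str) -> None:
    # A distributed checkpoint is one shared directory, so only the single
    # global rank 0 may move it; classic per-rank checkpoints keep the old
    # gate on the three model-parallel coordinates.
    if dist_ckpt:
        should_move_data = is_global_rank_zero()
    else:
        should_move_data = (
            app_state.pipeline_model_parallel_rank == 0
            and app_state.tensor_model_parallel_rank == 0
            and app_state.data_parallel_rank == 0
        )

    if should_move_data:
        with tempfile.TemporaryDirectory() as tmpdir:
            if dist_ckpt:
                shutil.move(dist_ckpt_dir, tmpdir)
            # ... stage weights and archive tmpdir into the .nemo file ...

    # Mirrors the second hunk: ranks that skipped the move wait here until
    # the mover is done, instead of returning early.
    if torch.distributed.is_initialized():
        torch.distributed.barrier()

Launched under torchrun, every rank calls pack_checkpoint; with dist_ckpt=True only global rank 0 touches the checkpoint directory, and the final barrier keeps the remaining ranks from racing ahead before the data has actually moved.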
