Fix file path for shard_num 1 with mllama converter (#35053)
"#35049 fix path for num_shard 1"
strangiato authored Dec 10, 2024
1 parent 0938b57 commit 80f2b16
Showing 1 changed file with 5 additions and 1 deletion.
```diff
@@ -338,7 +338,11 @@ def write_model(
 
     print(f"Fetching all parameters from the checkpoint at {input_base_path}...")
     if num_shards == 1:
-        loaded = [torch.load(os.path.join(input_base_path, "consolidated.pth"), map_location="cpu", mmap=True)]
+        if os.path.exists(os.path.join(input_base_path, "consolidated.00.pth")):
+            path = os.path.join(input_base_path, "consolidated.00.pth")
+        else:
+            path = os.path.join(input_base_path, "consolidated.pth")
+        loaded = [torch.load(path, map_location="cpu", mmap=True)]
     else:
         loaded = [
             torch.load(os.path.join(input_base_path, f"consolidated.{i:02d}.pth"), map_location="cpu", mmap=True)
```
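For illustration, here is a minimal, self-contained sketch of the fallback logic this patch introduces. The helper name `resolve_checkpoint_path` and the example directory are hypothetical and not part of the converter; the diff above inlines this logic directly in `write_model`.

```python
import os

import torch


def resolve_checkpoint_path(input_base_path: str) -> str:
    """Hypothetical helper mirroring the patch: some single-shard
    checkpoints ship as 'consolidated.00.pth' rather than
    'consolidated.pth', so prefer the former if it exists."""
    sharded_style = os.path.join(input_base_path, "consolidated.00.pth")
    if os.path.exists(sharded_style):
        return sharded_style
    return os.path.join(input_base_path, "consolidated.pth")


# Usage sketch: load the single-shard checkpoint on CPU with mmap,
# matching how the converter calls torch.load (mmap requires PyTorch >= 2.1).
# loaded = [torch.load(resolve_checkpoint_path("/path/to/mllama"),
#                      map_location="cpu", mmap=True)]
```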
