Fix the megatron cyclic sampler (#9458)
jstjohn authored Jun 13, 2024
1 parent e00ba0b commit 5fa95ce
Showing 1 changed file with 2 additions and 2 deletions.
nemo/lightning/data.py (2 additions & 2 deletions)
@@ -103,7 +103,6 @@ def add_megatron_sampler(
         )
     elif dataloader_type == 'cyclic':
         batch_sampler = MegatronPretrainingRandomSampler(
-            dataloader.dataset,
             total_samples=len(dataloader.dataset),
             consumed_samples=consumed_samples,
             micro_batch_size=micro_batch_size,
@@ -259,8 +258,9 @@ def __iter__(self):
         assert current_epoch_samples % self.micro_batch_times_data_parallel_size == 0

         # data sharding and random sampling
+        data_parallel_size = self.micro_batch_times_data_parallel_size // self.micro_batch_size
         bucket_size = (self.total_samples // self.micro_batch_times_data_parallel_size) * self.micro_batch_size
-        bucket_offset = current_epoch_samples // self.data_parallel_size
+        bucket_offset = current_epoch_samples // data_parallel_size
         start_idx = self.data_parallel_rank * bucket_size

         g = torch.Generator()
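For context: the first hunk drops a stray positional `dataloader.dataset` argument to the `MegatronPretrainingRandomSampler` constructor, and the second fixes `__iter__`, which previously read `self.data_parallel_size`, an attribute the sampler apparently does not define. The sampler does store the product `micro_batch_times_data_parallel_size`, so the fix recovers the data-parallel size by dividing out `micro_batch_size`. The sketch below replays the sharding arithmetic with assumed toy values; it mirrors the patched lines but is illustrative, not the NeMo implementation.

```python
import torch

# Minimal standalone sketch (not NeMo code) of the sharding arithmetic in
# MegatronPretrainingRandomSampler.__iter__, using assumed toy values.
total_samples = 1000       # len(dataset)
consumed_samples = 128     # samples already consumed across all ranks
micro_batch_size = 4
data_parallel_rank = 0
micro_batch_times_data_parallel_size = 8  # micro_batch_size * data_parallel_size

# Samples belonging to the current (partial) epoch.
active_total_samples = total_samples - (total_samples % micro_batch_times_data_parallel_size)
current_epoch_samples = consumed_samples % active_total_samples
assert current_epoch_samples % micro_batch_times_data_parallel_size == 0

# The fix: recover data_parallel_size from the stored product.
data_parallel_size = micro_batch_times_data_parallel_size // micro_batch_size

# Each data-parallel rank shuffles its own bucket of the dataset and skips
# the indices it has already consumed this epoch.
bucket_size = (total_samples // micro_batch_times_data_parallel_size) * micro_batch_size
bucket_offset = current_epoch_samples // data_parallel_size  # old code read self.data_parallel_size
start_idx = data_parallel_rank * bucket_size

g = torch.Generator()
g.manual_seed(0)  # the real sampler seeds with the current epoch
random_idx = torch.randperm(bucket_size, generator=g).tolist()
idx_range = [start_idx + x for x in random_idx[bucket_offset:]]
print(len(idx_range))  # indices left for this rank in the current epoch
```

With these values, `data_parallel_size` comes out to 2, `bucket_size` to 500, and `bucket_offset` to 64, so 436 shuffled indices remain for rank 0 in the current epoch.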
