
Commit

Fixed a few bugs related to dataset len and batches per epoch
jsschreck committed Dec 31, 2024
1 parent 08420fb commit c85c987
Showing 2 changed files with 11 additions and 4 deletions.
5 changes: 3 additions & 2 deletions credit/datasets/load_dataset_and_dataloader.py
@@ -105,8 +105,9 @@ def __iter__(self):
         Yields:
             sample: A single sample from the dataset.
         """
-        for sample in self.dataset:  # Directly iterate over the dataset
-            yield sample
+        dataset_iter = iter(self.dataset)
+        for _ in range(len(self)):
+            yield next(dataset_iter)

     def __len__(self):
         """
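
The `__iter__` change above bounds iteration at `len(self)` rather than draining the underlying dataset, so the wrapper yields exactly as many samples as it reports. A minimal, self-contained sketch of the pattern (the `FixedLengthWrapper` class and `infinite_counter` generator are hypothetical illustrations, not repository code):

# Hypothetical minimal reproduction of the fixed-length iteration pattern.
class FixedLengthWrapper:
    def __init__(self, dataset, length):
        self.dataset = dataset
        self.length = length

    def __len__(self):
        return self.length

    def __iter__(self):
        dataset_iter = iter(self.dataset)
        for _ in range(len(self)):  # stop after exactly len(self) samples
            yield next(dataset_iter)

def infinite_counter():
    """A source that never stops producing samples."""
    i = 0
    while True:
        yield i
        i += 1

wrapped = FixedLengthWrapper(infinite_counter(), length=3)
print(list(wrapped))  # [0, 1, 2]: iteration ends even though the source does not

With the old `for sample in self.dataset` loop, a source like this would iterate forever; bounding the loop with `range(len(self))` keeps the reported dataset length and the actual epoch length in agreement.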
10 changes: 8 additions & 2 deletions credit/trainers/trainerERA5_multistep_grad_accum.py
@@ -151,10 +151,16 @@ def train_one_epoch(

         # set up a custom tqdm
         if not isinstance(trainloader.dataset, IterableDataset):
+            # Check if the dataset has its own batches_per_epoch method
+            if hasattr(trainloader.dataset, "batches_per_epoch"):
+                dataset_batches_per_epoch = trainloader.dataset.batches_per_epoch()
+            else:
+                dataset_batches_per_epoch = len(trainloader)
+            # Use the user-given number if not larger than the dataset
             batches_per_epoch = (
                 batches_per_epoch
-                if 0 < batches_per_epoch < len(trainloader)
-                else len(trainloader)
+                if 0 < batches_per_epoch < dataset_batches_per_epoch
+                else dataset_batches_per_epoch
             )

             batch_group_generator = tqdm.tqdm(
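
The clamping logic above can be read as a standalone function. A sketch under assumed names (`resolve_batches_per_epoch` and the stub classes are illustrations, not the repository's API): prefer the dataset's own `batches_per_epoch()` when it exists, and honor the user-supplied value only when it is positive and smaller than what the dataset can supply.

# Assumed standalone version of the clamping logic; not the repository's API.
def resolve_batches_per_epoch(trainloader, batches_per_epoch):
    # Prefer the dataset's own notion of batches per epoch when available
    if hasattr(trainloader.dataset, "batches_per_epoch"):
        dataset_batches_per_epoch = trainloader.dataset.batches_per_epoch()
    else:
        dataset_batches_per_epoch = len(trainloader)
    # Honor the user-given number only if positive and within the dataset's supply
    return (
        batches_per_epoch
        if 0 < batches_per_epoch < dataset_batches_per_epoch
        else dataset_batches_per_epoch
    )

class _StubDataset:
    def batches_per_epoch(self):
        return 100

class _StubLoader:
    dataset = _StubDataset()

loader = _StubLoader()
print(resolve_batches_per_epoch(loader, 10))   # 10: user value respected
print(resolve_batches_per_epoch(loader, 0))    # 100: non-positive falls back
print(resolve_batches_per_epoch(loader, 500))  # 100: capped at dataset supply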
