
Commit 20e9656

Disabled background batch processing (#77)

1 parent: 408045e

1 file changed (+3, -3 lines)

dl_bench/utils.py

Lines changed: 3 additions & 3 deletions
@@ -34,10 +34,10 @@ def get_inf_loaders(n, in_shape, batch_size, device: str):
 
     ds = RandomInfDataset(n, in_shape)
     train_loader = DataLoader(
-        ds, batch_size=batch_size, shuffle=True, num_workers=4, pin_memory=pin_memory
+        ds, batch_size=batch_size, shuffle=True, num_workers=0, pin_memory=pin_memory
     )
     test_loader = DataLoader(
-        ds, batch_size=batch_size, shuffle=False, num_workers=4, pin_memory=pin_memory
+        ds, batch_size=batch_size, shuffle=False, num_workers=0, pin_memory=pin_memory
     )
     return train_loader, test_loader
 
@@ -363,7 +363,7 @@ def inference(self, backend: Backend):
             self.dataset,
             batch_size=self.batch_size,
             shuffle=False,
-            num_workers=4,
+            num_workers=0,
             pin_memory=backend.device_name == "cuda",
         )
 
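For context, the change swaps num_workers=4 for num_workers=0 in each torch.utils.data.DataLoader. With num_workers=0, PyTorch prepares batches in the calling process instead of spawning background worker processes, which is what the commit message means by disabling background batch processing. Below is a minimal sketch (not from the repo) of the resulting loader behavior; RandomDataset is a hypothetical stand-in for the repo's RandomInfDataset.

    # Minimal sketch: DataLoader with num_workers=0 (no background workers).
    import torch
    from torch.utils.data import DataLoader, Dataset

    class RandomDataset(Dataset):
        """Hypothetical stand-in for RandomInfDataset: n random tensors of shape in_shape."""
        def __init__(self, n, in_shape):
            self.data = torch.randn(n, *in_shape)

        def __len__(self):
            return len(self.data)

        def __getitem__(self, idx):
            return self.data[idx]

    ds = RandomDataset(n=64, in_shape=(3, 224, 224))
    loader = DataLoader(
        ds,
        batch_size=8,
        shuffle=True,
        num_workers=0,     # 0 = batches are built synchronously in this process
        pin_memory=False,  # True only when feeding a CUDA device
    )

    for batch in loader:
        pass  # each batch is produced in the main process, no worker processes spawned

The trade-off is straightforward: num_workers=0 avoids the startup cost and multiprocessing overhead of worker processes, at the price of losing overlapped batch preparation; for a synthetic in-memory dataset like the one in this benchmark, that overlap buys little.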
