From 77628ec6033b47e489054b383953378288526391 Mon Sep 17 00:00:00 2001
From: Stas Bekman
Date: Fri, 1 Dec 2023 15:21:30 -0800
Subject: [PATCH] Update accelerator.py

---
 src/accelerate/accelerator.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/accelerate/accelerator.py b/src/accelerate/accelerator.py
index 2815a00577a..bc4e351152f 100755
--- a/src/accelerate/accelerator.py
+++ b/src/accelerate/accelerator.py
@@ -1427,14 +1427,14 @@ def _prepare_deepspeed(self, *args):
 
                 if any(bs is None for bs in batch_sizes):
                     raise ValueError(
-                        "At least one of the dataloaders passed to `accelerate.prepare()` has `None` as batch size."
-                        "Please set an integer value in `train_micro_batch_size_per_gpu` in the deepspeed config file"
+                        "At least one of the dataloaders passed to `accelerate.prepare()` has `None` as batch size. "
+                        "Please set an integer value in `train_micro_batch_size_per_gpu` in the deepspeed config file "
                         "or assign integer value to `AcceleratorState().deepspeed_plugin.deepspeed_config['train_micro_batch_size_per_gpu']`."
                     )
                 if len(batch_sizes) == 0:
                     raise ValueError(
                         "When using DeepSpeed `accelerate.prepare()` requires you to pass at least one of training or evaluation dataloaders "
-                        "or alternatively set an integer value in `train_micro_batch_size_per_gpu` in the deepspeed config file"
+                        "or alternatively set an integer value in `train_micro_batch_size_per_gpu` in the deepspeed config file "
                         "or assign integer value to `AcceleratorState().deepspeed_plugin.deepspeed_config['train_micro_batch_size_per_gpu']`."
                     )
 
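
For reference, the two remedies the corrected messages point users to look roughly like this in practice. This is a minimal sketch, not part of the patch: the config filename ds_config.json and the batch size value 8 are illustrative assumptions.

    # Remedy 1: set an integer in the DeepSpeed config file (e.g. ds_config.json):
    #     { "train_micro_batch_size_per_gpu": 8 }
    #
    # Remedy 2: assign the value programmatically, before calling `accelerator.prepare()`,
    # exactly as the error message suggests:
    from accelerate import AcceleratorState

    # Assumes the run was configured with a DeepSpeed plugin; otherwise
    # `deepspeed_plugin` is None. The value 8 is illustrative.
    AcceleratorState().deepspeed_plugin.deepspeed_config["train_micro_batch_size_per_gpu"] = 8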