commit c53873d (1 parent 531dfb8)
pytorch_lightning/plugins/training_type/ddp_spawn.py
@@ -59,7 +59,7 @@ def __init__(
         self.sync_batchnorm = sync_batchnorm
         self._ddp_kwargs = kwargs
         self.dist = LightningDistributed()
-        self.num_processes = len(parallel_devices) if parallel_devices is not None else parallel_devices
+        self.num_processes = len(parallel_devices) if parallel_devices is not None else None
         self.node_rank = 0
         self.mp_queue = None
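For readers skimming the change: when parallel_devices is None, the old right-hand side (else parallel_devices) already evaluated to None, so the new form (else None) is behaviorally identical and simply states the intent explicitly. A minimal standalone sketch of the expression, using an illustrative helper name rather than Lightning's actual API:

from typing import List, Optional

def derive_num_processes(parallel_devices: Optional[List[int]]) -> Optional[int]:
    # Mirrors the updated line: count the devices when they are given,
    # otherwise leave the process count unset (None) explicitly.
    return len(parallel_devices) if parallel_devices is not None else None

print(derive_num_processes([0, 1, 2, 3]))  # 4
print(derive_num_processes(None))          # None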