Skip to content

Commit 6ac16ff

Browse files
authored
Fix DistribType for ddp_cpu (spawn) (#7492)
1 parent 53f8d9a commit 6ac16ff

File tree

4 files changed

+10
-6
lines changed

4 files changed

+10
-6
lines changed

CHANGELOG.md

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -90,6 +90,8 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
9090
- Fixed recursive passing of `wrong_type` keyword argument in `pytorch_lightning.utilities.apply_to_collection` ([#7433](https://github.com/PyTorchLightning/pytorch-lightning/pull/7433))
9191

9292

93+
- Fixed setting correct `DistribType` for `ddp_cpu` (spawn) backend ([#7492](https://github.com/PyTorchLightning/pytorch-lightning/pull/7492))
94+
9395

9496
## [1.3.1] - 2021-05-11
9597

pytorch_lightning/trainer/connectors/accelerator_connector.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -519,7 +519,7 @@ def set_distributed_mode(self, distributed_backend: Optional[str] = None):
519519

520520
# special case with DDP on CPUs
521521
if self.distributed_backend == "ddp_cpu":
522-
self._distrib_type = DistributedType.DDP
522+
self._distrib_type = DistributedType.DDP_SPAWN
523523
if self.num_gpus > 0:
524524
rank_zero_warn(
525525
'You requested one or more GPUs, but set the backend to `ddp_cpu`. Training will not use GPUs.'

tests/accelerators/test_accelerator_connector.py

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -430,13 +430,15 @@ def test_ipython_incompatible_backend_error(*_):
430430
with pytest.raises(MisconfigurationException, match="backend ddp is not compatible"):
431431
Trainer(accelerator="ddp", gpus=2)
432432

433-
with pytest.raises(MisconfigurationException, match="backend ddp is not compatible"):
434-
Trainer(accelerator="ddp_cpu", num_processes=2)
435-
436433
with pytest.raises(MisconfigurationException, match="backend ddp2 is not compatible"):
437434
Trainer(accelerator="ddp2", gpus=2)
438435

439436

437+
@mock.patch("pytorch_lightning.utilities._IS_INTERACTIVE", return_value=True)
438+
def test_ipython_compatible_backend(*_):
439+
Trainer(accelerator="ddp_cpu", num_processes=2)
440+
441+
440442
@pytest.mark.parametrize(
441443
["accelerator", "plugin"],
442444
[('ddp_spawn', 'ddp_sharded'), (None, 'ddp_sharded')],

tests/trainer/test_trainer.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1132,7 +1132,7 @@ def test_num_sanity_val_steps_neg_one(tmpdir, limit_val_batches):
11321132
),
11331133
(
11341134
dict(accelerator="ddp_cpu", num_processes=2, gpus=None),
1135-
dict(_distrib_type=DistributedType.DDP, _device_type=DeviceType.CPU, num_gpus=0, num_processes=2),
1135+
dict(_distrib_type=DistributedType.DDP_SPAWN, _device_type=DeviceType.CPU, num_gpus=0, num_processes=2),
11361136
),
11371137
(
11381138
dict(accelerator="ddp2", gpus=None),
@@ -1152,7 +1152,7 @@ def test_num_sanity_val_steps_neg_one(tmpdir, limit_val_batches):
11521152
),
11531153
(
11541154
dict(accelerator="ddp_cpu", num_processes=2, gpus=1),
1155-
dict(_distrib_type=DistributedType.DDP, _device_type=DeviceType.CPU, num_gpus=0, num_processes=2),
1155+
dict(_distrib_type=DistributedType.DDP_SPAWN, _device_type=DeviceType.CPU, num_gpus=0, num_processes=2),
11561156
),
11571157
(
11581158
dict(accelerator="ddp2", gpus=1),

0 commit comments

Comments
 (0)