
Commit f806a4a

Author: SeanNaren (committed)
Revert back to splitting the tests
1 parent a2a9ef0 commit f806a4a

File tree

1 file changed (+15, -5 lines)


tests/plugins/test_sharded_plugin.py

Lines changed: 15 additions & 5 deletions
@@ -4,7 +4,6 @@
 import pytest
 import torch

-import tests.helpers.utils as tutils
 from pytorch_lightning import Trainer
 from pytorch_lightning.callbacks import Callback
 from pytorch_lightning.plugins import DDPShardedPlugin, DDPSpawnShardedPlugin
@@ -298,13 +297,24 @@ def training_step(self, batch, batch_idx):


 @RunIf(skip_windows=True, special=True, fairscale=True, min_gpus=2)
-@pytest.mark.parametrize("accelerator", ["ddp_sharded"])
-def test_ddp_sharded_plugin_manual_optimization(tmpdir, accelerator):
-    tutils.set_random_master_port()
+def test_ddp_sharded_plugin_manual_optimization_spawn(tmpdir):
+    # todo (sean): this test has been split out as running both tests using parametrize causes "Address in use"
     model = ManualBoringModel()
     trainer = Trainer(
         default_root_dir=tmpdir,
-        accelerator=accelerator,
+        accelerator='ddp_sharded_spawn',
+        fast_dev_run=2,
+        gpus=2,
+    )
+    trainer.fit(model)
+
+
+@RunIf(skip_windows=True, special=True, fairscale=True, min_gpus=2)
+def test_ddp_sharded_plugin_manual_optimization(tmpdir):
+    model = ManualBoringModel()
+    trainer = Trainer(
+        default_root_dir=tmpdir,
+        accelerator='ddp_sharded',
         fast_dev_run=2,
         gpus=2,
     )
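For readability, here is a sketch of how the two manual-optimization tests read after this commit, reconstructed from the hunk above. ManualBoringModel and RunIf are defined elsewhere in the test suite and are not shown in this diff, and the trailing trainer.fit(model) call of the second test sits just past the end of the hunk, so those parts are assumptions rather than content confirmed by the diff.

# Sketch reconstructed from the diff above; ManualBoringModel and RunIf come
# from elsewhere in the test suite, so this block is illustrative, not a
# drop-in replacement for the file.
from pytorch_lightning import Trainer


@RunIf(skip_windows=True, special=True, fairscale=True, min_gpus=2)
def test_ddp_sharded_plugin_manual_optimization_spawn(tmpdir):
    # todo (sean): this test has been split out as running both tests using
    # parametrize causes "Address in use"
    model = ManualBoringModel()
    trainer = Trainer(
        default_root_dir=tmpdir,
        accelerator='ddp_sharded_spawn',  # spawn-based sharded DDP
        fast_dev_run=2,
        gpus=2,
    )
    trainer.fit(model)


@RunIf(skip_windows=True, special=True, fairscale=True, min_gpus=2)
def test_ddp_sharded_plugin_manual_optimization(tmpdir):
    model = ManualBoringModel()
    trainer = Trainer(
        default_root_dir=tmpdir,
        accelerator='ddp_sharded',  # script-launched sharded DDP
        fast_dev_run=2,
        gpus=2,
    )
    trainer.fit(model)  # assumed: this line lies just outside the shown hunk

Splitting the parametrized test into two standalone functions means each accelerator variant runs in its own test, avoiding the "Address in use" collision noted in the todo comment when both ran under one parametrize.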
