@@ -4,7 +4,6 @@
 import pytest
 import torch
 
-import tests.helpers.utils as tutils
 from pytorch_lightning import Trainer
 from pytorch_lightning.callbacks import Callback
 from pytorch_lightning.plugins import DDPShardedPlugin, DDPSpawnShardedPlugin
@@ -298,13 +297,24 @@ def training_step(self, batch, batch_idx):
 
 
 @RunIf(skip_windows=True, special=True, fairscale=True, min_gpus=2)
-@pytest.mark.parametrize("accelerator", ["ddp_sharded"])
-def test_ddp_sharded_plugin_manual_optimization(tmpdir, accelerator):
-    tutils.set_random_master_port()
+def test_ddp_sharded_plugin_manual_optimization_spawn(tmpdir):
+    # todo (sean): this test has been split out as running both tests using parametrize causes "Address in use"
     model = ManualBoringModel()
     trainer = Trainer(
         default_root_dir=tmpdir,
-        accelerator=accelerator,
+        accelerator='ddp_sharded_spawn',
+        fast_dev_run=2,
+        gpus=2,
+    )
+    trainer.fit(model)
+
+
+@RunIf(skip_windows=True, special=True, fairscale=True, min_gpus=2)
+def test_ddp_sharded_plugin_manual_optimization(tmpdir):
+    model = ManualBoringModel()
+    trainer = Trainer(
+        default_root_dir=tmpdir,
+        accelerator='ddp_sharded',
         fast_dev_run=2,
         gpus=2,
     )
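For context, `ManualBoringModel` is presumably defined earlier in this test file and does not appear in the hunk above. A minimal manual-optimization module along the same lines might look like the sketch below; the class and dataset names are illustrative only, and it assumes a PyTorch Lightning version where `automatic_optimization` has a setter and `self.optimizers()` / `self.manual_backward()` are available:

```python
import torch
from torch.utils.data import DataLoader, Dataset

from pytorch_lightning import LightningModule


class RandomDataset(Dataset):
    """Random tensors standing in for real data (illustrative only)."""

    def __init__(self, size: int = 32, length: int = 64):
        self.data = torch.randn(length, size)

    def __getitem__(self, index):
        return self.data[index]

    def __len__(self):
        return len(self.data)


class ManualOptModel(LightningModule):
    """Sketch of a manual-optimization model, similar in spirit to ManualBoringModel."""

    def __init__(self):
        super().__init__()
        self.layer = torch.nn.Linear(32, 2)
        # Opt out of automatic optimization; training_step drives the optimizer itself.
        self.automatic_optimization = False

    def forward(self, x):
        return self.layer(x)

    def training_step(self, batch, batch_idx):
        opt = self.optimizers()
        opt.zero_grad()
        loss = self(batch).sum()
        # manual_backward runs the backward pass through the active plugin/precision hooks
        # instead of a raw loss.backward()
        self.manual_backward(loss)
        opt.step()
        return loss

    def train_dataloader(self):
        return DataLoader(RandomDataset(), batch_size=8)

    def configure_optimizers(self):
        return torch.optim.SGD(self.parameters(), lr=0.1)
```

The two tests above run this kind of model with `accelerator='ddp_sharded_spawn'` and `accelerator='ddp_sharded'` respectively, on 2 GPUs with `fast_dev_run=2`, so only a couple of batches execute per test.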