Remove deprecated device attributes from Trainer #14829

Merged · Sep 22, 2022 · 5 commits
6 changes: 5 additions & 1 deletion src/pytorch_lightning/CHANGELOG.md
@@ -197,9 +197,13 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).

- Removed the deprecated way to set the distributed backend via the environment variable `PL_TORCH_DISTRIBUTED_BACKEND`, in favor of setting the `process_group_backend` in the strategy constructor ([#14693](https://github.com/Lightning-AI/lightning/pull/14693))

- Removed the deprecated device attributes `Trainer.{devices,gpus,num_gpus,ipus,tpu_cores}` in favor of the accelerator-agnostic `Trainer.num_devices` ([#14829](https://github.com/Lightning-AI/lightning/pull/14829))

- Removed the deprecated `Trainer.use_amp` and `LightningModule.use_amp` attributes ([#14832](https://github.com/Lightning-AI/lightning/pull/14832))

- Removed the deprecated `Trainer.root_gpu` attribute in favor of `Trainer.strategy.root_device` ([#14829](https://github.com/Lightning-AI/lightning/pull/14829))


### Fixed
48 changes: 0 additions & 48 deletions src/pytorch_lightning/trainer/trainer.py
@@ -2052,46 +2052,6 @@ def num_devices(self) -> int:
"""Number of devices the trainer uses per node."""
return len(self.device_ids)

@property
def root_gpu(self) -> Optional[int]:
rank_zero_deprecation(
"`Trainer.root_gpu` is deprecated in v1.6 and will be removed in v1.8. "
"Please use `Trainer.strategy.root_device.index` instead."
)
return self.strategy.root_device.index if isinstance(self.accelerator, CUDAAccelerator) else None

@property
def tpu_cores(self) -> int:
rank_zero_deprecation(
"`Trainer.tpu_cores` is deprecated in v1.6 and will be removed in v1.8. "
"Please use `Trainer.num_devices` instead."
)
return self.num_devices if isinstance(self.accelerator, TPUAccelerator) else 0

@property
def ipus(self) -> int:
rank_zero_deprecation(
"`Trainer.ipus` was deprecated in v1.6 and will be removed in v1.8."
" Please use `Trainer.num_devices` instead."
)
return self.num_devices if isinstance(self.accelerator, IPUAccelerator) else 0

@property
def num_gpus(self) -> int:
rank_zero_deprecation(
"`Trainer.num_gpus` was deprecated in v1.6 and will be removed in v1.8."
" Please use `Trainer.num_devices` instead."
)
return self.num_devices if isinstance(self.accelerator, CUDAAccelerator) else 0

@property
def devices(self) -> int:
rank_zero_deprecation(
"`Trainer.devices` was deprecated in v1.6 and will be removed in v1.8."
" Please use `Trainer.num_devices` or `Trainer.device_ids` to get device information instead."
)
return self.num_devices

@property
def lightning_module(self) -> "pl.LightningModule":
# TODO: this is actually an optional return
@@ -2140,14 +2100,6 @@ def precision(self) -> Union[str, int]:
def scaler(self) -> Optional[Any]:
return getattr(self.precision_plugin, "scaler", None)

@property
def gpus(self) -> Optional[Union[List[int], str, int]]:
rank_zero_deprecation(
"`Trainer.gpus` was deprecated in v1.6 and will be removed in v1.8."
" Please use `Trainer.num_devices` or `Trainer.device_ids` to get device information instead."
)
return self._accelerator_connector._gpus

@property
def model(self) -> torch.nn.Module:
"""The LightningModule, but possibly wrapped into DataParallel or DistributedDataParallel.
136 changes: 0 additions & 136 deletions tests/tests_pytorch/deprecated_api/test_remove_1-8.py
@@ -19,7 +19,6 @@
import numpy as np
import pytest

import pytorch_lightning
from pytorch_lightning import Callback, Trainer
from pytorch_lightning.callbacks import ModelCheckpoint
from pytorch_lightning.demos.boring_classes import BoringDataModule, BoringModel
@@ -30,7 +29,6 @@
from pytorch_lightning.trainer.configuration_validator import _check_datamodule_checkpoint_hooks
from pytorch_lightning.trainer.states import RunningStage
from pytorch_lightning.utilities.rank_zero import rank_zero_only
from tests_pytorch.helpers.runif import RunIf


def test_v1_8_0_on_init_start_end(tmpdir):
@@ -490,104 +488,6 @@ def on_load_checkpoint(self, checkpoint):
_check_datamodule_checkpoint_hooks(trainer)


def test_trainer_config_device_ids():
trainer = Trainer(devices=2)
with pytest.deprecated_call(
match="`Trainer.devices` was deprecated in v1.6 and will be removed in v1.8."
" Please use `Trainer.num_devices` or `Trainer.device_ids` to get device information instead."
):
        assert trainer.devices == 2


@pytest.mark.parametrize(
["gpus", "expected_root_gpu", "strategy"],
[
pytest.param(None, None, "ddp", id="None is None"),
pytest.param(0, None, "ddp", id="O gpus, expect gpu root device to be None."),
pytest.param(1, 0, "ddp", id="1 gpu, expect gpu root device to be 0."),
pytest.param(-1, 0, "ddp", id="-1 - use all gpus, expect gpu root device to be 0."),
pytest.param("-1", 0, "ddp", id="'-1' - use all gpus, expect gpu root device to be 0."),
pytest.param(3, 0, "ddp", id="3 gpus, expect gpu root device to be 0.(backend:ddp)"),
],
)
def test_root_gpu_property(cuda_count_4, gpus, expected_root_gpu, strategy):
with pytest.deprecated_call(
match="`Trainer.root_gpu` is deprecated in v1.6 and will be removed in v1.8. "
"Please use `Trainer.strategy.root_device.index` instead."
):
assert Trainer(gpus=gpus, strategy=strategy).root_gpu == expected_root_gpu


@pytest.mark.parametrize(
["gpus", "expected_root_gpu", "strategy"],
[
pytest.param(None, None, None, id="None is None"),
pytest.param(None, None, "ddp", id="None is None"),
pytest.param(0, None, "ddp", id="None is None"),
],
)
def test_root_gpu_property_0_passing(cuda_count_0, gpus, expected_root_gpu, strategy):
with pytest.deprecated_call(
match="`Trainer.root_gpu` is deprecated in v1.6 and will be removed in v1.8. "
"Please use `Trainer.strategy.root_device.index` instead."
):
assert Trainer(gpus=gpus, strategy=strategy).root_gpu == expected_root_gpu


@pytest.mark.parametrize(
["gpus", "expected_num_gpus", "strategy"],
[
pytest.param(None, 0, None, id="None - expect 0 gpu to use."),
        pytest.param(0, 0, None, id="0 gpus, expect 0 gpus to use."),
pytest.param(1, 1, None, id="1st gpu, expect 1 gpu to use."),
pytest.param(-1, 4, "ddp", id="-1 - use all gpus"),
pytest.param("-1", 4, "ddp", id="'-1' - use all gpus"),
pytest.param(3, 3, "ddp", id="3rd gpu - 1 gpu to use (backend:ddp)"),
],
)
def test_trainer_gpu_parse(cuda_count_4, gpus, expected_num_gpus, strategy):
with pytest.deprecated_call(
match="`Trainer.num_gpus` was deprecated in v1.6 and will be removed in v1.8."
" Please use `Trainer.num_devices` instead."
):
assert Trainer(gpus=gpus, strategy=strategy).num_gpus == expected_num_gpus


@pytest.mark.parametrize(
["gpus", "expected_num_gpus", "strategy"],
[
pytest.param(None, 0, None, id="None - expect 0 gpu to use."),
pytest.param(None, 0, "ddp", id="None - expect 0 gpu to use."),
],
)
def test_trainer_num_gpu_0(cuda_count_0, gpus, expected_num_gpus, strategy):
with pytest.deprecated_call(
match="`Trainer.num_gpus` was deprecated in v1.6 and will be removed in v1.8."
" Please use `Trainer.num_devices` instead."
):
assert Trainer(gpus=gpus, strategy=strategy).num_gpus == expected_num_gpus


@pytest.mark.parametrize(
["trainer_kwargs", "expected_ipus"],
[
({}, 0),
({"devices": 1}, 0),
({"accelerator": "ipu", "devices": 1}, 1),
({"accelerator": "ipu", "devices": 8}, 8),
],
)
def test_trainer_config_ipus(monkeypatch, trainer_kwargs, expected_ipus):
monkeypatch.setattr(pytorch_lightning.accelerators.ipu.IPUAccelerator, "is_available", lambda _: True)
monkeypatch.setattr(pytorch_lightning.strategies.ipu, "_IPU_AVAILABLE", lambda: True)
trainer = Trainer(**trainer_kwargs)
with pytest.deprecated_call(
match="`Trainer.ipus` was deprecated in v1.6 and will be removed in v1.8."
" Please use `Trainer.num_devices` instead."
):
        assert trainer.ipus == expected_ipus


def test_v1_8_0_deprecated_lightning_ipu_module():
with pytest.deprecated_call(match=r"has been deprecated in v1.7.0 and will be removed in v1.8."):
_ = LightningIPUModule(BoringModel(), 32)
@@ -653,39 +553,3 @@ def on_save_checkpoint(self, trainer, pl_module, checkpoint):

trainer.callbacks = [TestCallbackSaveHookOverride()]
trainer.save_checkpoint(tmpdir + "/pathok.ckpt")


@pytest.mark.parametrize(
"trainer_kwargs",
[
pytest.param({"accelerator": "gpu", "devices": 2}, marks=RunIf(mps=False)),
pytest.param({"accelerator": "gpu", "devices": [0, 2]}, marks=RunIf(mps=False)),
pytest.param({"accelerator": "gpu", "devices": "2"}, marks=RunIf(mps=False)),
pytest.param({"accelerator": "gpu", "devices": "0,"}, marks=RunIf(mps=False)),
pytest.param({"accelerator": "gpu", "devices": 1}, marks=RunIf(mps=True)),
pytest.param({"accelerator": "gpu", "devices": [0]}, marks=RunIf(mps=True)),
pytest.param({"accelerator": "gpu", "devices": "0,"}, marks=RunIf(mps=True)),
],
)
def test_trainer_gpus(cuda_count_4, trainer_kwargs):
trainer = Trainer(**trainer_kwargs)
with pytest.deprecated_call(
match=(
"`Trainer.gpus` was deprecated in v1.6 and will be removed in v1.8."
" Please use `Trainer.num_devices` or `Trainer.device_ids` to get device information instead."
)
):
assert trainer.gpus == trainer_kwargs["devices"]


@RunIf(skip_windows=True)
def test_trainer_tpu_cores(monkeypatch):
monkeypatch.setattr(pytorch_lightning.accelerators.tpu.TPUAccelerator, "is_available", lambda _: True)
trainer = Trainer(accelerator="tpu", devices=8)
with pytest.deprecated_call(
match=(
"`Trainer.tpu_cores` is deprecated in v1.6 and will be removed in v1.8. "
"Please use `Trainer.num_devices` instead."
)
):
assert trainer.tpu_cores == 8