From 47bccec1f8dc5cb49cfe676e38145c1439faa30c Mon Sep 17 00:00:00 2001 From: Kaushik B Date: Tue, 30 Nov 2021 16:54:47 +0530 Subject: [PATCH 1/8] Don't import torch_xla.debug for torch-xla<1.8 --- pytorch_lightning/profiler/xla.py | 9 +++++++-- tests/profiler/test_xla_profiler.py | 6 ++++-- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/pytorch_lightning/profiler/xla.py b/pytorch_lightning/profiler/xla.py index e30f06f84e952..fb5873a521057 100644 --- a/pytorch_lightning/profiler/xla.py +++ b/pytorch_lightning/profiler/xla.py @@ -42,9 +42,10 @@ from typing import Dict from pytorch_lightning.profiler.base import BaseProfiler -from pytorch_lightning.utilities import _TPU_AVAILABLE +from pytorch_lightning.utilities import _TORCH_GREATER_EQUAL_1_8, _TPU_AVAILABLE +from pytorch_lightning.utilities.exceptions import MisconfigurationException -if _TPU_AVAILABLE: +if _TPU_AVAILABLE and _TORCH_GREATER_EQUAL_1_8: import torch_xla.debug.profiler as xp log = logging.getLogger(__name__) @@ -65,6 +66,10 @@ class XLAProfiler(BaseProfiler): def __init__(self, port: int = 9012) -> None: """This Profiler will help you debug and optimize training workload performance for your models using Cloud TPU performance tools.""" + if not _TPU_AVAILABLE: + raise MisconfigurationException("`XLAProfiler` is only supported on TPUs") + if not _TORCH_GREATER_EQUAL_1_8: + raise MisconfigurationException("`XLAProfiler` is only supported with `torch-xla>=1.8`") super().__init__(dirpath=None, filename=None) self.port = port self._recording_map: Dict = {} diff --git a/tests/profiler/test_xla_profiler.py b/tests/profiler/test_xla_profiler.py index 2afbf69a6d0b0..7f460ea11d322 100644 --- a/tests/profiler/test_xla_profiler.py +++ b/tests/profiler/test_xla_profiler.py @@ -18,14 +18,16 @@ from pytorch_lightning import Trainer from pytorch_lightning.profiler import XLAProfiler -from pytorch_lightning.utilities import _TPU_AVAILABLE +from pytorch_lightning.utilities import _TORCH_GREATER_EQUAL_1_8, _TPU_AVAILABLE from tests.helpers import BoringModel from tests.helpers.runif import RunIf if _TPU_AVAILABLE: - import torch_xla.debug.profiler as xp import torch_xla.utils.utils as xu + if _TORCH_GREATER_EQUAL_1_8: + import torch_xla.debug.profiler as xp + @RunIf(tpu=True) def test_xla_profiler_instance(tmpdir): From 96da9958563e4f269b21dc2fefe8c29edc479e18 Mon Sep 17 00:00:00 2001 From: Kaushik B Date: Tue, 30 Nov 2021 17:00:18 +0530 Subject: [PATCH 2/8] Update changelog --- CHANGELOG.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d1eed9848cdbb..cda5c5159f169 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -199,8 +199,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/). 
 - Fixed the default logging level for batch hooks associated with training from `on_step=False, on_epoch=True` to `on_step=True, on_epoch=False` ([#10756](https://github.com/PyTorchLightning/pytorch-lightning/pull/10756))
 
--
-
+- Fixed importing `torch_xla.debug` for `torch-xla<1.8` ([#10836](https://github.com/PyTorchLightning/pytorch-lightning/pull/10836))
 
 -

From 569380a74f6ab668389b5e720720fbbe7b9365ef Mon Sep 17 00:00:00 2001
From: Kaushik B
Date: Tue, 30 Nov 2021 17:29:47 +0530
Subject: [PATCH 3/8] Add tests

---
 tests/profiler/test_xla_profiler.py | 15 +++++++++++++++
 1 file changed, 15 insertions(+)

diff --git a/tests/profiler/test_xla_profiler.py b/tests/profiler/test_xla_profiler.py
index 7f460ea11d322..f0c3829396972 100644
--- a/tests/profiler/test_xla_profiler.py
+++ b/tests/profiler/test_xla_profiler.py
@@ -13,12 +13,14 @@
 # limitations under the License.
 import os
 from multiprocessing import Event, Process
+from unittest.mock import patch
 
 import pytest
 
 from pytorch_lightning import Trainer
 from pytorch_lightning.profiler import XLAProfiler
 from pytorch_lightning.utilities import _TORCH_GREATER_EQUAL_1_8, _TPU_AVAILABLE
+from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from tests.helpers import BoringModel
 from tests.helpers.runif import RunIf
 
@@ -62,3 +64,16 @@ def train_worker():
     p.terminate()
 
     assert os.isfile(os.path.join(logdir, "plugins", "profile", "*", "*.xplane.pb"))
+
+
+@patch("pytorch_lightning.utilities.imports._TPU_AVAILABLE", return_value=False)
+def test_xla_profiler_tpu_not_available_exception(*_):
+    with pytest.raises(MisconfigurationException, match="`XLAProfiler` is only supported on TPUs"):
+        _ = XLAProfiler()
+
+
+@RunIf(max_torch="1.8.0")
+@patch("pytorch_lightning.utilities.imports._TPU_AVAILABLE", return_value=True)
+def test_xla_profiler_torch_lesser_than_1_8_exception(*_):
+    with pytest.raises(MisconfigurationException, match="`XLAProfiler` is only supported with `torch-xla>=1.8`"):
+        _ = XLAProfiler()

From 62464ce238847abc204183694a57c28f59954ff1 Mon Sep 17 00:00:00 2001
From: Kaushik B
Date: Tue, 30 Nov 2021 17:42:43 +0530
Subject: [PATCH 4/8] Address reviews

---
 pytorch_lightning/profiler/xla.py   | 2 +-
 tests/profiler/test_xla_profiler.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/pytorch_lightning/profiler/xla.py b/pytorch_lightning/profiler/xla.py
index fb5873a521057..c89685bcad0be 100644
--- a/pytorch_lightning/profiler/xla.py
+++ b/pytorch_lightning/profiler/xla.py
@@ -69,7 +69,7 @@ def __init__(self, port: int = 9012) -> None:
         if not _TPU_AVAILABLE:
             raise MisconfigurationException("`XLAProfiler` is only supported on TPUs")
         if not _TORCH_GREATER_EQUAL_1_8:
-            raise MisconfigurationException("`XLAProfiler` is only supported with `torch-xla>=1.8`")
+            raise MisconfigurationException("`XLAProfiler` is only supported with `torch-xla >= 1.8`")
         super().__init__(dirpath=None, filename=None)
         self.port = port
         self._recording_map: Dict = {}
diff --git a/tests/profiler/test_xla_profiler.py b/tests/profiler/test_xla_profiler.py
index f0c3829396972..2d87a8340eaa1 100644
--- a/tests/profiler/test_xla_profiler.py
+++ b/tests/profiler/test_xla_profiler.py
@@ -75,5 +75,5 @@ def test_xla_profiler_tpu_not_available_exception(*_):
 @RunIf(max_torch="1.8.0")
 @patch("pytorch_lightning.utilities.imports._TPU_AVAILABLE", return_value=True)
 def test_xla_profiler_torch_lesser_than_1_8_exception(*_):
-    with pytest.raises(MisconfigurationException, match="`XLAProfiler` is only supported with `torch-xla>=1.8`"):
+    with pytest.raises(MisconfigurationException, match="`XLAProfiler` is only supported with `torch-xla >= 1.8`"):
         _ = XLAProfiler()

From 9f9fbbc59d46f0abc7292846b46685ae1aa26cd6 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Adrian=20W=C3=A4lchli?=
Date: Wed, 1 Dec 2021 01:44:59 +0100
Subject: [PATCH 5/8] fix accidental _notebooks change

---
 _notebooks | 1 -
 1 file changed, 1 deletion(-)
 delete mode 160000 _notebooks

diff --git a/_notebooks b/_notebooks
deleted file mode 160000
index a2fb6468112b7..0000000000000
--- a/_notebooks
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit a2fb6468112b7e1dad501c3b6a17533a4adfeabc

From 04bc4d25d7dc04b1df2694fe785d54a6ce032e75 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Adrian=20W=C3=A4lchli?=
Date: Wed, 1 Dec 2021 01:45:12 +0100
Subject: [PATCH 6/8] fix accidental _notebooks change

---
 _notebooks | 1 +
 1 file changed, 1 insertion(+)
 create mode 160000 _notebooks

diff --git a/_notebooks b/_notebooks
new file mode 160000
index 0000000000000..0c325829101d5
--- /dev/null
+++ b/_notebooks
@@ -0,0 +1 @@
+Subproject commit 0c325829101d5a6ebf32ed99bbf5b09badf04a59

From 2a5c3a33dd78adeefd88666e91cf3b87bf0420a7 Mon Sep 17 00:00:00 2001
From: Kaushik B
Date: Wed, 1 Dec 2021 10:52:16 +0530
Subject: [PATCH 7/8] Update tests

---
 tests/profiler/test_xla_profiler.py | 7 -------
 1 file changed, 7 deletions(-)

diff --git a/tests/profiler/test_xla_profiler.py b/tests/profiler/test_xla_profiler.py
index 2d87a8340eaa1..e77e756e691f3 100644
--- a/tests/profiler/test_xla_profiler.py
+++ b/tests/profiler/test_xla_profiler.py
@@ -70,10 +70,3 @@ def train_worker():
 def test_xla_profiler_tpu_not_available_exception(*_):
     with pytest.raises(MisconfigurationException, match="`XLAProfiler` is only supported on TPUs"):
         _ = XLAProfiler()
-
-
-@RunIf(max_torch="1.8.0")
-@patch("pytorch_lightning.utilities.imports._TPU_AVAILABLE", return_value=True)
-def test_xla_profiler_torch_lesser_than_1_8_exception(*_):
-    with pytest.raises(MisconfigurationException, match="`XLAProfiler` is only supported with `torch-xla >= 1.8`"):
-        _ = XLAProfiler()

From e8edfb6b10271e980afef4d7ea4b7a59d1b1623b Mon Sep 17 00:00:00 2001
From: Kaushik B
Date: Mon, 6 Dec 2021 11:28:59 +0530
Subject: [PATCH 8/8] Update test

---
 tests/profiler/test_xla_profiler.py | 8 --------
 1 file changed, 8 deletions(-)

diff --git a/tests/profiler/test_xla_profiler.py b/tests/profiler/test_xla_profiler.py
index e77e756e691f3..7f460ea11d322 100644
--- a/tests/profiler/test_xla_profiler.py
+++ b/tests/profiler/test_xla_profiler.py
@@ -13,14 +13,12 @@
 # limitations under the License.
 import os
 from multiprocessing import Event, Process
-from unittest.mock import patch
 
 import pytest
 
 from pytorch_lightning import Trainer
 from pytorch_lightning.profiler import XLAProfiler
 from pytorch_lightning.utilities import _TORCH_GREATER_EQUAL_1_8, _TPU_AVAILABLE
-from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from tests.helpers import BoringModel
 from tests.helpers.runif import RunIf
 
@@ -64,9 +62,3 @@ def train_worker():
     p.terminate()
 
     assert os.isfile(os.path.join(logdir, "plugins", "profile", "*", "*.xplane.pb"))
-
-
-@patch("pytorch_lightning.utilities.imports._TPU_AVAILABLE", return_value=False)
-def test_xla_profiler_tpu_not_available_exception(*_):
-    with pytest.raises(MisconfigurationException, match="`XLAProfiler` is only supported on TPUs"):
-        _ = XLAProfiler()
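
Taken together, patches 1, 4, 7, and 8 leave `pytorch_lightning/profiler/xla.py` guarding both the module-level import and the constructor. Below is a minimal, self-contained sketch of that final pattern, not the library's actual code: the flag names mirror the ones imported from `pytorch_lightning.utilities`, but their hard-coded values and the stub exception class are placeholders so the snippet runs on its own.

    # Placeholder flags; the real module imports these from
    # pytorch_lightning.utilities rather than hard-coding them.
    _TPU_AVAILABLE = False
    _TORCH_GREATER_EQUAL_1_8 = True

    if _TPU_AVAILABLE and _TORCH_GREATER_EQUAL_1_8:
        # torch_xla.debug.profiler only exists from torch-xla 1.8 on, so the
        # import itself sits behind both checks.
        import torch_xla.debug.profiler as xp


    class MisconfigurationException(Exception):
        """Placeholder for pytorch_lightning.utilities.exceptions.MisconfigurationException."""


    class XLAProfiler:
        def __init__(self, port: int = 9012) -> None:
            # Fail fast with an actionable message instead of a NameError
            # surfacing later, when `xp` would first be used.
            if not _TPU_AVAILABLE:
                raise MisconfigurationException("`XLAProfiler` is only supported on TPUs")
            if not _TORCH_GREATER_EQUAL_1_8:
                raise MisconfigurationException("`XLAProfiler` is only supported with `torch-xla >= 1.8`")
            self.port = port

Constructing `XLAProfiler()` on a machine without TPUs then raises immediately with a clear message, which is the behavior the tests in patch 3 exercised.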
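
One more note on the tests that patch 3 added and patches 7 and 8 later removed: `unittest.mock.patch` substitutes a truthy `MagicMock` for its target by default, and `return_value=False` only configures what that mock returns when called, which a plain `if not _TPU_AVAILABLE:` truthiness check never does. Forcing a module-level boolean off takes `new=`, aimed at the namespace that actually looks the flag up; the target below assumes the `from pytorch_lightning.utilities import _TPU_AVAILABLE` binding that patch 1 creates in the profiler module.

    from unittest.mock import patch

    # Replace the boolean itself (new=False) in the module that reads it,
    # instead of wrapping it in a callable mock.
    with patch("pytorch_lightning.profiler.xla._TPU_AVAILABLE", new=False):
        ...  # XLAProfiler() would raise MisconfigurationException here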