1 parent 9ce794c commit dd01e03
tests/models/test_tpu.py
@@ -350,7 +350,7 @@ def test_reduce(rank):
 
 
 @pytest.mark.parametrize("clip_val", [0, 10])
-@RunIf(tpu=True)
+@pytest.mark.skipif(not _TPU_AVAILABLE, reason="test requires TPU machine")
 @pl_multi_process_test
 @mock.patch("pytorch_lightning.accelerators.tpu.xla_clip_grad_norm_")
 def test_tpu_precision_16_clip_gradients(mock_clip_grad_norm, clip_val, tmpdir):
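The change swaps the `@RunIf(tpu=True)` marker for a plain pytest `skipif` guard keyed on a TPU-availability flag. As a rough, self-contained sketch (not taken from this commit) of how such a guard behaves, with `_TPU_AVAILABLE` stubbed as a local boolean so the snippet runs without pytorch_lightning or a TPU:

import pytest

# Assumption: in the real test suite _TPU_AVAILABLE is imported from
# pytorch_lightning; it is stubbed here so the example is runnable anywhere.
_TPU_AVAILABLE = False


@pytest.mark.parametrize("clip_val", [0, 10])
@pytest.mark.skipif(not _TPU_AVAILABLE, reason="test requires TPU machine")
def test_clip_gradients_example(clip_val, tmpdir):
    # Executes only when the flag is True; otherwise pytest collects the
    # test but reports it as skipped with the given reason.
    assert clip_val in (0, 10)

When the flag is False, the test still appears in the collection report, so the skip (and its reason) stays visible on machines without a TPU.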