Skip to content

Commit 6174931

Browse files
Enabled tests (#2279)
Co-authored-by: vfdev <vfdev.5@gmail.com>
1 parent 931275a commit 6174931

File tree

1 file changed: +11 additions, -19 deletions

tests/ignite/engine/test_deterministic.py

+11-19
Original file line numberDiff line numberDiff line change
@@ -120,9 +120,7 @@ def test_reproducible_batch_sampler():
120120
resumed_seen_batches = []
121121
for b in dataloader_:
122122
resumed_seen_batches.append(b)
123-
# temporarily disable this while running on torch nightly
124-
if "dev" not in torch.__version__:
125-
assert all([(b1 == b2).all() for b1, b2 in zip(seen_batches[resume_epoch], resumed_seen_batches)])
123+
assert all([(b1 == b2).all() for b1, b2 in zip(seen_batches[resume_epoch], resumed_seen_batches)])
126124

127125

128126
def _test_keep_random_state(with_numpy):
@@ -333,11 +331,9 @@ def _(engine):
333331

334332
@pytest.mark.skipif("win" in sys.platform, reason="Skip extremely slow test on Windows/MacOSX")
335333
def test_resume_random_dataloader_from_epoch():
336-
# temporarily disable this while running on torch nightly
337-
if "dev" not in torch.__version__:
338-
_test_resume_random_dataloader_from_epoch("cpu", setup_sampler)
339-
_test_resume_random_dataloader_from_epoch("cpu", setup_sampler, sampler_type="weighted")
340-
_test_resume_random_dataloader_from_epoch("cpu", setup_sampler, sampler_type="distributed")
334+
_test_resume_random_dataloader_from_epoch("cpu", setup_sampler)
335+
_test_resume_random_dataloader_from_epoch("cpu", setup_sampler, sampler_type="weighted")
336+
_test_resume_random_dataloader_from_epoch("cpu", setup_sampler, sampler_type="distributed")
341337

342338

343339
class AugmentedData:
@@ -445,11 +441,9 @@ def _(engine):
445441

446442
@pytest.mark.skipif("win" in sys.platform, reason="Skip extremely slow test on Windows/MacOSX")
447443
def test_resume_random_dataloader_from_iter():
448-
# temporarily disable this while running on torch nightly
449-
if "dev" not in torch.__version__:
450-
_test_resume_random_dataloader_from_iter("cpu", setup_sampler)
451-
_test_resume_random_dataloader_from_iter("cpu", setup_sampler, sampler_type="weighted")
452-
_test_resume_random_dataloader_from_iter("cpu", setup_sampler, sampler_type="distributed")
444+
_test_resume_random_dataloader_from_iter("cpu", setup_sampler)
445+
_test_resume_random_dataloader_from_iter("cpu", setup_sampler, sampler_type="weighted")
446+
_test_resume_random_dataloader_from_iter("cpu", setup_sampler, sampler_type="distributed")
453447

454448

455449
def _test_resume_random_data_iterator_from_epoch(device):
@@ -808,12 +802,10 @@ def write_data_grads_weights(e):
808802
def test_gradients_on_resume_cpu(dirname):
809803
with pytest.raises(AssertionError):
810804
_test_gradients_on_resume(dirname, "cpu", with_dataaugs=True, save_iter=25)
811-
# temporarily disable this while running on torch nightly
812-
if "dev" not in torch.__version__:
813-
_test_gradients_on_resume(dirname, "cpu", with_dataaugs=False, save_iter=25)
814-
# resume from epoch
815-
_test_gradients_on_resume(dirname, "cpu", with_dataaugs=True, save_epoch=3)
816-
_test_gradients_on_resume(dirname, "cpu", with_dataaugs=False, save_epoch=3)
805+
_test_gradients_on_resume(dirname, "cpu", with_dataaugs=False, save_iter=25)
806+
# resume from epoch
807+
_test_gradients_on_resume(dirname, "cpu", with_dataaugs=True, save_epoch=3)
808+
_test_gradients_on_resume(dirname, "cpu", with_dataaugs=False, save_epoch=3)
817809

818810

819811
@pytest.mark.skipif(not torch.cuda.is_available(), reason="Skip if no GPU")

0 commit comments

Comments (0)