
Commit f80fe3f

abhudev authored and kulinseth committed
Update blocklist (#106)
* Updated blocklist
* Further update blocklist
1 parent 5255f7a · commit f80fe3f

File tree: 1 file changed (+2, -37 lines)


test/test_mps.py

Lines changed: 2 additions & 37 deletions
@@ -7542,7 +7542,6 @@ class TestConsistency(TestCase):

  # These were moved from ALLOWLIST to BLOCK as they are not working
  # locally
- 'tile': ['torch.float16', 'torch.float32', 'torch.int16', 'torch.int32', 'torch.int64', 'torch.uint8'],
  '__radd__': ['torch.bool', 'torch.uint8'],
  '__rmul__': ['torch.uint8'],
  'add': ['torch.bool', 'torch.uint8'],
@@ -7552,39 +7551,23 @@ class TestConsistency(TestCase):

  # Functions that are flaky
  # These are detected as "ok" by the expect case but actually fail to run sometimes
- 'H': None,
- 'T': None,
- 'as_strided': None,
- 'broadcast_tensors': None,
  'broadcast': None,
- 'broadcast_to': None,
- 'diagonal': None,
  'divfloor_rounding': None,
  'divno_rounding_mode': None,
  'divtrunc_rounding': None,
- 'dsplit': None,
- 'hsplit': None,
  'empty': None,
- 'expand_as': None,
- 'expand': None,
  'ge': None,
  'ne': None,
  'le': None,
  'lt': None,
  'gt': None,
- 'transpose': None,
  'splitlist_args': None,
- 'select': None,
- 'reshape': None,
  'reshape_as': None,
- 'permute': None,
  'norm': None,
  'nn.functional.pixel_unshuffle': None,
  'nn.functional.pixel_shuffle': None,
  'nn.functional.cross_entropy': None,
  'nn.functional.one_hot': None,
- 'narrow': None,
- 'movedim': None,
  'minreduction_with_dim': None,
  'minreduction_no_dim': None,
  'minbinary': None,
@@ -7595,8 +7578,6 @@ class TestConsistency(TestCase):
  'maxbinary': None,
  'maximum': None,
  'minimum': None,
- 'mT': None,
- 'mH': None,
  'outer': None,
  'softmaxwith_dtype': None,
  'rounddecimals_neg_3': None,
@@ -7606,15 +7587,11 @@ class TestConsistency(TestCase):
  'nn.functional.softminwith_dtype': None,
  'nn.functional.feature_alpha_dropoutwith_train': None,
  'log_softmaxdtype': None,
- 'split_with_sizes': None,
  'trapezoid': None,
  'eq': None,
  'mul': None,
- 'cartesian_prod': None,
  'nonzero': None,
- 'bool': None,
  'inner': None,
- 'dstack': None,
  'take_along_dim': None,

  # New block list ops that need investigation
@@ -7647,11 +7624,9 @@ class TestConsistency(TestCase):
  'clamp_max': ['torch.bool', 'torch.float16', 'torch.float32', 'torch.int16', 'torch.int32', 'torch.int64', 'torch.uint8'],
  'clamp_min': ['torch.bool', 'torch.float16', 'torch.float32', 'torch.int16', 'torch.int32', 'torch.int64', 'torch.uint8'],
  'constant_pad_nd': ['torch.float16', 'torch.float32', 'torch.int16', 'torch.int32', 'torch.int64', 'torch.uint8'],
- 'corrcoef': ['torch.int16', 'torch.int32', 'torch.int64', 'torch.uint8'],
  'cos': ['torch.bool'],
  'cosh': ['torch.bool'],
  'count_nonzero': ['torch.bool', 'torch.float16', 'torch.float32', 'torch.int16', 'torch.int32', 'torch.int64', 'torch.uint8'],
- 'cov': ['torch.int16', 'torch.int32', 'torch.int64', 'torch.uint8'],
  'diff': ['torch.bool', 'torch.uint8'],
  'eig': ['torch.float32'],
  'empty_like': ['torch.bool', 'torch.float16', 'torch.float32', 'torch.int16', 'torch.int32', 'torch.int64', 'torch.uint8'],
@@ -7666,9 +7641,8 @@ class TestConsistency(TestCase):
  'fft.rfft2': ['torch.bool', 'torch.float32', 'torch.int16', 'torch.int32', 'torch.int64', 'torch.uint8'],
  'fft.rfft': ['torch.bool', 'torch.float32', 'torch.int16', 'torch.int32', 'torch.int64', 'torch.uint8'],
  'fft.rfftn': ['torch.bool', 'torch.float32', 'torch.int16', 'torch.int32', 'torch.int64', 'torch.uint8'],
- 'float': ['torch.bool', 'torch.float16', 'torch.int16', 'torch.int32', 'torch.int64', 'torch.uint8'],
+ 'float': ['torch.int64'],
  'gather': ['torch.bool', 'torch.float16', 'torch.float32', 'torch.int16', 'torch.int32', 'torch.int64', 'torch.uint8'],
- 'gradient': ['torch.float16', 'torch.float32', 'torch.int16', 'torch.int32'],
  'half': ['torch.bool', 'torch.float32', 'torch.int16', 'torch.int32', 'torch.int64', 'torch.uint8'],
  'index_put': ['torch.bool', 'torch.float16', 'torch.float32', 'torch.int16', 'torch.int32', 'torch.int64', 'torch.uint8'],
  'index_select': ['torch.uint8'],
@@ -7708,7 +7682,6 @@ class TestConsistency(TestCase):
  'nn.functional.normalize': ['torch.float32'],
  'nn.functional.pad': ['torch.float16', 'torch.float32', 'torch.int16', 'torch.int32', 'torch.int64'],
  'nn.functional.pairwise_distance': ['torch.uint8'],
- 'nn.functional.softsign': ['torch.int32'],
  'nn.functional.triplet_margin_loss': ['torch.uint8'],
  'nn.functional.triplet_margin_with_distance_loss': ['torch.uint8'],
  'nn.functional.upsample_nearest': ['torch.float32'],
@@ -7734,17 +7707,9 @@ class TestConsistency(TestCase):
  'tanh': ['torch.bool'],
  'tensor_split': ['torch.bool', 'torch.float16', 'torch.float32', 'torch.int16', 'torch.int32', 'torch.int64', 'torch.uint8'],
  'topk': ['torch.int16', 'torch.int32', 'torch.int64', 'torch.uint8'],
- 'trapz': ['torch.float16', 'torch.float32', 'torch.int16', 'torch.int32', 'torch.int64', 'torch.uint8'],
- 'true_divide': ['torch.int32', 'torch.int64'],
+ 'trapz': ['torch.float16', 'torch.float32', 'torch.int16', 'torch.int32', 'torch.int64', 'torch.uint8'],
  'nn.functional.local_response_norm': ['torch.int64'],
- 'flip': ['torch.bool'],
- 'fliplr': ['torch.bool'],
- 'flipud': ['torch.bool'],
  'index_select': ['torch.bool'],
- 'repeat': ['torch.bool'],
- 'rot90': ['torch.bool'],
- 'tile': ['torch.bool'],
- 'split': ['torch.float32'],
  }

  # Used for accept mode only
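
For reference, the blocklist entries above map an OpInfo test name to either a list of dtype strings or None, which appears to block the op for every dtype. Below is a minimal sketch of how such a mapping could be consulted; the helper and variable names are illustrative assumptions, not the actual test_mps.py harness code.

# Hypothetical sketch: consulting a blocklist that maps op names to
# dtype strings (or None). Names here are illustrative, not taken
# from test_mps.py.

BLOCKLIST = {
    'float': ['torch.int64'],   # blocked only for the listed dtypes
    'broadcast': None,          # None: presumably blocked for every dtype
}

def is_blocked(op_name, dtype):
    """Return True if the op/dtype combination should be skipped."""
    if op_name not in BLOCKLIST:
        return False
    blocked_dtypes = BLOCKLIST[op_name]
    # A value of None blocks the op regardless of dtype.
    return blocked_dtypes is None or str(dtype) in blocked_dtypes

# Example usage (illustrative only):
#   is_blocked('float', 'torch.int64')      -> True
#   is_blocked('float', 'torch.float32')    -> False
#   is_blocked('broadcast', 'torch.float32') -> True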
