
Commit 1799d7d

abhudev authored and kulinseth committed
Update blocklist (#106)
* Updated blocklist
* Further update blocklist
1 parent: a33b4ed · commit: 1799d7d


1 file changed: +3 −34 lines


test/test_mps.py

Lines changed: 3 additions & 34 deletions
@@ -8688,7 +8688,6 @@ class TestConsistency(TestCase):
 
         # These were moved from ALLOWLIST to BLOCK as they are not working
         # locally
-        'tile': ['torch.float16', 'torch.float32', 'torch.int16', 'torch.int32', 'torch.int64', 'torch.uint8'],
         '__radd__': ['torch.bool', 'torch.uint8'],
         '__rmul__': ['torch.uint8'],
         'neg': ['torch.uint8'],
@@ -8699,39 +8698,23 @@ class TestConsistency(TestCase):
 
         # Functions that are flaky
         # These are detected as "ok" by the expect case but actually fail to run sometimes
-        'H': None,
-        'T': None,
-        'as_strided': None,
-        'broadcast_tensors': None,
         'broadcast': None,
-        'broadcast_to': None,
-        'diagonal': None,
         'divfloor_rounding': None,
         'divno_rounding_mode': None,
         'divtrunc_rounding': None,
-        'dsplit': None,
-        'hsplit': None,
         'empty': None,
-        'expand_as': None,
-        'expand': None,
         'ge': None,
         'ne': None,
         'le': None,
         'lt': None,
         'gt': None,
-        'transpose': None,
         'splitlist_args': None,
-        'select': None,
-        'reshape': None,
         'reshape_as': None,
-        'permute': None,
         'norm': None,
         'nn.functional.pixel_unshuffle': None,
         'nn.functional.pixel_shuffle': None,
         'nn.functional.cross_entropy': None,
         'nn.functional.one_hot': None,
-        'narrow': None,
-        'movedim': None,
         'minreduction_with_dim': None,
         'minreduction_no_dim': None,
         'minbinary': None,
@@ -8742,8 +8725,6 @@ class TestConsistency(TestCase):
         'maxbinary': None,
         'maximum': None,
         'minimum': None,
-        'mT': None,
-        'mH': None,
         'outer': None,
         'softmaxwith_dtype': None,
         'rounddecimals_neg_3': None,
@@ -8759,8 +8740,8 @@ class TestConsistency(TestCase):
         'mul': None,
         'cartesian_prod': None,
         'bool': None,
+        'nonzero': None,
         'inner': None,
-        'dstack': None,
         'take_along_dim': None,
 
         # New block list ops that need investigation
@@ -8793,11 +8774,9 @@ class TestConsistency(TestCase):
         'clamp_max': ['torch.bool', 'torch.float16', 'torch.float32', 'torch.int16', 'torch.int32', 'torch.int64', 'torch.uint8'],
         'clamp_min': ['torch.bool', 'torch.float16', 'torch.float32', 'torch.int16', 'torch.int32', 'torch.int64', 'torch.uint8'],
         'constant_pad_nd': ['torch.float16', 'torch.float32', 'torch.int16', 'torch.int32', 'torch.int64', 'torch.uint8'],
-        'corrcoef': ['torch.int16', 'torch.int32', 'torch.int64', 'torch.uint8'],
         'cos': ['torch.bool'],
         'cosh': ['torch.bool'],
         'count_nonzero': ['torch.bool', 'torch.float16', 'torch.float32', 'torch.int16', 'torch.int32', 'torch.int64', 'torch.uint8'],
-        'cov': ['torch.int16', 'torch.int32', 'torch.int64', 'torch.uint8'],
         'diff': ['torch.bool', 'torch.uint8'],
         'eig': ['torch.float32'],
         'empty_like': ['torch.bool', 'torch.float16', 'torch.float32', 'torch.int16', 'torch.int32', 'torch.int64', 'torch.uint8'],
@@ -8812,9 +8791,8 @@ class TestConsistency(TestCase):
         'fft.rfft2': ['torch.bool', 'torch.float32', 'torch.int16', 'torch.int32', 'torch.int64', 'torch.uint8'],
         'fft.rfft': ['torch.bool', 'torch.float32', 'torch.int16', 'torch.int32', 'torch.int64', 'torch.uint8'],
         'fft.rfftn': ['torch.bool', 'torch.float32', 'torch.int16', 'torch.int32', 'torch.int64', 'torch.uint8'],
-        'float': ['torch.bool', 'torch.float16', 'torch.int16', 'torch.int32', 'torch.int64', 'torch.uint8'],
+        'float': ['torch.int64'],
         'gather': ['torch.bool', 'torch.float16', 'torch.float32', 'torch.int16', 'torch.int32', 'torch.int64', 'torch.uint8'],
-        'gradient': ['torch.float16', 'torch.float32', 'torch.int16', 'torch.int32'],
         'half': ['torch.bool', 'torch.float32', 'torch.int16', 'torch.int32', 'torch.int64', 'torch.uint8'],
         'index_put': ['torch.bool', 'torch.float16', 'torch.float32', 'torch.int16', 'torch.int32', 'torch.int64', 'torch.uint8'],
         'index_select': ['torch.uint8'],
@@ -8854,7 +8832,6 @@ class TestConsistency(TestCase):
         'nn.functional.normalize': ['torch.float32'],
         'nn.functional.pad': ['torch.float16', 'torch.float32', 'torch.int16', 'torch.int32', 'torch.int64'],
         'nn.functional.pairwise_distance': ['torch.uint8'],
-        'nn.functional.softsign': ['torch.int32'],
         'nn.functional.triplet_margin_loss': ['torch.uint8'],
         'nn.functional.triplet_margin_with_distance_loss': ['torch.uint8'],
         'nn.functional.upsample_nearest': ['torch.float32'],
@@ -8880,17 +8857,9 @@ class TestConsistency(TestCase):
         'tanh': ['torch.bool'],
         'tensor_split': ['torch.bool', 'torch.float16', 'torch.float32', 'torch.int16', 'torch.int32', 'torch.int64', 'torch.uint8'],
         'topk': ['torch.int16', 'torch.int32', 'torch.int64', 'torch.uint8'],
-        'trapz': ['torch.float16', 'torch.float32', 'torch.int16', 'torch.int32', 'torch.int64', 'torch.uint8'],
-        'true_divide': ['torch.int32', 'torch.int64'],
+        'trapz': ['torch.float16', 'torch.float32', 'torch.int16', 'torch.int32', 'torch.int64', 'torch.uint8'],
         'nn.functional.local_response_norm': ['torch.int64'],
-        'flip': ['torch.bool'],
-        'fliplr': ['torch.bool'],
-        'flipud': ['torch.bool'],
         'index_select': ['torch.bool'],
-        'repeat': ['torch.bool'],
-        'rot90': ['torch.bool'],
-        'tile': ['torch.bool'],
-        'split': ['torch.float32'],
     }
 
     # Those ops worked on MacOS12, but broken on MacOS13
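For context on how entries like these take effect: the blocklist maps an op name either to None (block the op for every dtype) or to a list of dtype strings (block only those dtypes). The sketch below is a minimal, hypothetical illustration of that lookup under those assumptions; the names BLOCKLIST and should_skip and the standalone structure are illustrative, not the exact helpers in test_mps.py.

# Hypothetical sketch of how a blocklist of this shape gates a test run.
# An entry maps an op name to None ("skip for all dtypes") or to a list
# of dtype strings ("skip only these dtypes").
BLOCKLIST = {
    'nonzero': None,                  # flaky for every dtype
    'float': ['torch.int64'],         # fails only for int64 inputs
}

def should_skip(op_name, dtype_name):
    """Return True if this (op, dtype) pair should be skipped on MPS."""
    if op_name not in BLOCKLIST:
        return False
    blocked = BLOCKLIST[op_name]
    # None means the op is blocked regardless of dtype.
    return blocked is None or dtype_name in blocked

assert should_skip('nonzero', 'torch.float32')    # blocked for all dtypes
assert should_skip('float', 'torch.int64')        # blocked for this dtype
assert not should_skip('float', 'torch.float32')  # other dtypes still run

Under this reading, the commit narrows 'float' from six blocked dtypes to just 'torch.int64' and unblocks ops such as 'transpose', 'reshape', and 'flip' entirely, while newly blocking 'nonzero' for all dtypes.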
