diff --git a/fbgemm_gpu/experimental/gemm/test/fp8_gemm_test.py b/fbgemm_gpu/experimental/gemm/test/fp8_gemm_test.py
index 1c78f065d4..89f510e001 100644
--- a/fbgemm_gpu/experimental/gemm/test/fp8_gemm_test.py
+++ b/fbgemm_gpu/experimental/gemm/test/fp8_gemm_test.py
@@ -156,6 +156,8 @@ def _quantize_matmul_fp8(
             expected_result = a @ b.T
             if use_bias:
+                # pyre-fixme[6]: For 1st argument expected `Union[bool, complex,
+                #  float, int, Tensor]` but got `Optional[Tensor]`.
                 expected_result += bias
             self.assertTrue(
                 torch.allclose(result, expected_result, atol=2e-1, rtol=5e-2)