add bfloat16 test (#155)
Co-authored-by: Bowen12992 <zhangbluestars@gmail.com>
Bowen12992 authored Aug 7, 2024
commit 2e55d66 (parent: 48942a0)
Showing 2 changed files with 3 additions and 3 deletions.
tests/test_distribution_ops.py (1 addition, 1 deletion)
@@ -20,7 +20,7 @@ def test_accuracy_normal(shape, dtype):
 
 
 @pytest.mark.parametrize("shape", DISTRIBUTION_SHAPES)
-@pytest.mark.parametrize("dtype", [torch.float16, torch.float32])
+@pytest.mark.parametrize("dtype", FLOAT_DTYPES)
 def test_accuracy_uniform(shape, dtype):
     x = torch.randn(size=shape, dtype=dtype, device="cuda")
     with flag_gems.use_gems():
tests/test_tensor_constructor_ops.py (2 additions, 2 deletions)
@@ -12,7 +12,7 @@
 
 
 @pytest.mark.parametrize("shape", DISTRIBUTION_SHAPES)
-@pytest.mark.parametrize("dtype", [torch.float16, torch.float32])
+@pytest.mark.parametrize("dtype", FLOAT_DTYPES)
 def test_accuracy_rand(shape, dtype):
     with flag_gems.use_gems():
         res_out = torch.rand(shape, dtype=dtype, device="cuda")
@@ -21,7 +21,7 @@ def test_accuracy_rand(shape, dtype):
 
 
 @pytest.mark.parametrize("shape", DISTRIBUTION_SHAPES)
-@pytest.mark.parametrize("dtype", [torch.float16, torch.float32])
+@pytest.mark.parametrize("dtype", FLOAT_DTYPES)
 def test_accuracy_randn(shape, dtype):
     with flag_gems.use_gems():
         res_out = torch.randn(shape, dtype=dtype, device="cuda")
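
In all three hunks the change is the same: the hard-coded [torch.float16, torch.float32] list is replaced with the shared FLOAT_DTYPES constant already imported by the test suite, so each parametrized test also runs under torch.bfloat16, matching the commit title. A minimal sketch of what that constant presumably looks like (the exact definition lives in the repository's test utilities; this is an assumption, not the verbatim source):

import torch

# Assumed shared dtype list for the flag_gems accuracy tests.
# Extending this single list is what makes every test parametrized
# with FLOAT_DTYPES pick up a bfloat16 case automatically.
FLOAT_DTYPES = [torch.float16, torch.float32, torch.bfloat16]

Centralizing the dtype list means future additions propagate to every parametrized test without editing each file individually.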
