Fix 123 unittests by autofix #358

Merged: 11 commits, Jan 12, 2024
779 changes: 552 additions & 227 deletions paconvert/api_mapping.json

(Large diff not rendered here.)

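Most of the autofix-generated cases below follow one pattern: the same torch call is exercised with all-keyword arguments and again with those keywords reordered, so the converter's argument matching is checked independently of argument order. A minimal sketch of that pattern in plain PyTorch (an illustration only, not part of this PR; tensor values and variable names are made up):

import torch

a = torch.tensor([0, 1, 10, 0], dtype=torch.float32)
b = torch.tensor([4, 0, 1, 0], dtype=torch.float32)
out = torch.empty(4, dtype=torch.bool)

# Positional, keyword, and reordered-keyword calls must all agree.
r1 = torch.logical_and(a, b)
r2 = torch.logical_and(input=a, other=b, out=out)
r3 = torch.logical_and(out=out, other=b, input=a)
assert torch.equal(r1, r2) and torch.equal(r1, r3)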
26 changes: 26 additions & 0 deletions tests/test_logcumsumexp.py
@@ -72,3 +72,29 @@ def test_case_5():
"""
)
obj.run(pytorch_code, ["result", "out"])


# generated by validate_unittest autofix, based on test_case_5
def test_case_6():
pytorch_code = textwrap.dedent(
"""
import torch
x = torch.tensor([[0.56, 0.34, 0.78], [0.98, 0.21, 1.78]])
out = torch.tensor([[0.56, 0.34, 0.78], [0.98, 0.21, 1.78]])
result = torch.logcumsumexp(input=x, dim=1, out=out)
"""
)
obj.run(pytorch_code, ["result", "out"])


# generated by validate_unittest autofix, based on test_case_5
def test_case_7():
pytorch_code = textwrap.dedent(
"""
import torch
x = torch.tensor([[0.56, 0.34, 0.78], [0.98, 0.21, 1.78]])
out = torch.tensor([[0.56, 0.34, 0.78], [0.98, 0.21, 1.78]])
result = torch.logcumsumexp(out=out, dim=1, input=x)
"""
)
obj.run(pytorch_code, ["result", "out"])
28 changes: 28 additions & 0 deletions tests/test_logical_and.py
@@ -90,3 +90,31 @@ def _test_case_6():
"""
)
obj.run(pytorch_code, ["result"])


# generated by validate_unittest autofix, based on test_case_5
def test_case_7():
pytorch_code = textwrap.dedent(
"""
import torch
a = torch.tensor([0, 1, 10, 0], dtype=torch.float32)
b = torch.tensor([4, 0, 1, 0], dtype=torch.float32)
out = torch.tensor([True, False, True, True])
result = torch.logical_and(input=a, other=b, out=out)
"""
)
obj.run(pytorch_code, ["result", "out"])


# generated by validate_unittest autofix, based on test_case_5
def test_case_8():
pytorch_code = textwrap.dedent(
"""
import torch
a = torch.tensor([0, 1, 10, 0], dtype=torch.float32)
b = torch.tensor([4, 0, 1, 0], dtype=torch.float32)
out = torch.tensor([True, False, True, True])
result = torch.logical_and(out=out, other=b, input=a)
"""
)
obj.run(pytorch_code, ["result", "out"])
26 changes: 26 additions & 0 deletions tests/test_logical_not.py
@@ -73,3 +73,29 @@ def test_case_5():
"""
)
obj.run(pytorch_code, ["result", "out"])


# generated by validate_unittest autofix, based on test_case_5
def test_case_6():
pytorch_code = textwrap.dedent(
"""
import torch
a = torch.tensor([0, 1, 10, 0], dtype=torch.float32)
out = torch.tensor([True, False, True, True])
result = torch.logical_not(input=a, out=out)
"""
)
obj.run(pytorch_code, ["result", "out"])


# generated by validate_unittest autofix, based on test_case_5
def test_case_7():
pytorch_code = textwrap.dedent(
"""
import torch
a = torch.tensor([0, 1, 10, 0], dtype=torch.float32)
out = torch.tensor([True, False, True, True])
result = torch.logical_not(out=out, input=a)
"""
)
obj.run(pytorch_code, ["result", "out"])
28 changes: 28 additions & 0 deletions tests/test_logical_or.py
@@ -90,3 +90,31 @@ def _test_case_6():
"""
)
obj.run(pytorch_code, ["result"])


# generated by validate_unittest autofix, based on test_case_5
def test_case_7():
pytorch_code = textwrap.dedent(
"""
import torch
a = torch.tensor([0, 1, 10, 0], dtype=torch.float32)
b = torch.tensor([4, 0, 1, 0], dtype=torch.float32)
out = torch.tensor([True, False, True, True])
result = torch.logical_or(input=a, other=b, out=out)
"""
)
obj.run(pytorch_code, ["result", "out"])


# generated by validate_unittest autofix, based on test_case_5
def test_case_8():
pytorch_code = textwrap.dedent(
"""
import torch
a = torch.tensor([0, 1, 10, 0], dtype=torch.float32)
b = torch.tensor([4, 0, 1, 0], dtype=torch.float32)
out = torch.tensor([True, False, True, True])
result = torch.logical_or(out=out, other=b, input=a)
"""
)
obj.run(pytorch_code, ["result", "out"])
28 changes: 28 additions & 0 deletions tests/test_logical_xor.py
@@ -90,3 +90,31 @@ def _test_case_6():
"""
)
obj.run(pytorch_code, ["result"])


# generated by validate_unittest autofix, based on test_case_5
def test_case_7():
pytorch_code = textwrap.dedent(
"""
import torch
a = torch.tensor([0, 1, 10, 0], dtype=torch.float32)
b = torch.tensor([4, 0, 1, 0], dtype=torch.float32)
out = torch.tensor([True, False, True, True])
result = torch.logical_xor(input=a, other=b, out=out)
"""
)
obj.run(pytorch_code, ["result", "out"])


# generated by validate_unittest autofix, based on test_case_5
def test_case_8():
pytorch_code = textwrap.dedent(
"""
import torch
a = torch.tensor([0, 1, 10, 0], dtype=torch.float32)
b = torch.tensor([4, 0, 1, 0], dtype=torch.float32)
out = torch.tensor([True, False, True, True])
result = torch.logical_xor(out=out, other=b, input=a)
"""
)
obj.run(pytorch_code, ["result", "out"])
39 changes: 39 additions & 0 deletions tests/test_logit.py
@@ -63,3 +63,42 @@ def test_case_4():
"""
)
obj.run(pytorch_code, ["result", "out"])


# generated by validate_unittest autofix, based on test_case_4
def test_case_5():
pytorch_code = textwrap.dedent(
"""
import torch
input = torch.tensor([0.2796, 0.9331, 0.6486, 0.1523, 0.6516])
out = torch.tensor([0.2796, 0.9331, 0.6486, 0.1523, 0.6516])
result = torch.logit(input=input, eps=1e-6, out=out)
"""
)
obj.run(pytorch_code, ["result", "out"])


# generated by validate_unittest autofix, based on test_case_4
def test_case_6():
pytorch_code = textwrap.dedent(
"""
import torch
input = torch.tensor([0.2796, 0.9331, 0.6486, 0.1523, 0.6516])
out = torch.tensor([0.2796, 0.9331, 0.6486, 0.1523, 0.6516])
result = torch.logit(out=out, eps=1e-6, input=input)
"""
)
obj.run(pytorch_code, ["result", "out"])


# generated by validate_unittest autofix, based on test_case_4
def test_case_7():
pytorch_code = textwrap.dedent(
"""
import torch
input = torch.tensor([0.2796, 0.9331, 0.6486, 0.1523, 0.6516])
out = torch.tensor([0.2796, 0.9331, 0.6486, 0.1523, 0.6516])
result = torch.logit(input)
"""
)
obj.run(pytorch_code, ["result", "out"])
23 changes: 23 additions & 0 deletions tests/test_logspace.py
@@ -88,3 +88,26 @@ def test_case_7():
"""
)
obj.run(pytorch_code, ["result", "out"])


def test_case_8():
pytorch_code = textwrap.dedent(
"""
import torch
out = torch.tensor([1., 2, 3], dtype=torch.float64)
result = torch.logspace(-1, 3, 4, base=10.0, dtype=torch.float64, out=out, layout=torch.strided, device=None, requires_grad=False)
"""
)
obj.run(pytorch_code, ["result", "out"])


# generated by validate_unittest autofix, based on test_case_8
def test_case_9():
pytorch_code = textwrap.dedent(
"""
import torch
out = torch.tensor([1., 2, 3], dtype=torch.float64)
result = torch.logspace(start=-1, end=3, steps=4, base=10.0, out=out, dtype=torch.float64, layout=torch.strided, device=None, requires_grad=False)
"""
)
obj.run(pytorch_code, ["result", "out"])
39 changes: 39 additions & 0 deletions tests/test_logsumexp.py
@@ -85,3 +85,42 @@ def _test_case_6():
"""
)
obj.run(pytorch_code, ["result"])


# generated by validate_unittest autofix, based on test_case_5
def test_case_7():
pytorch_code = textwrap.dedent(
"""
import torch
input = torch.tensor([[1.4907, 1.0593, 1.5696], [1.4907, 1.0593, 1.5696]])
out = torch.tensor([[1.4907, 1.0593, 1.5696], [1.4907, 1.0593, 1.5696]])
result = torch.logsumexp(input, 1, True)
"""
)
obj.run(pytorch_code, ["result"])


# generated by validate_unittest autofix, based on test_case_5
def test_case_8():
pytorch_code = textwrap.dedent(
"""
import torch
input = torch.tensor([[1.4907, 1.0593, 1.5696], [1.4907, 1.0593, 1.5696]])
out = torch.tensor([[1.4907, 1.0593, 1.5696], [1.4907, 1.0593, 1.5696]])
result = torch.logsumexp(input=input, dim=1, keepdim=True, out=out)
"""
)
obj.run(pytorch_code, ["result"])


# generated by validate_unittest autofix, based on test_case_5
def test_case_9():
pytorch_code = textwrap.dedent(
"""
import torch
input = torch.tensor([[1.4907, 1.0593, 1.5696], [1.4907, 1.0593, 1.5696]])
out = torch.tensor([[1.4907, 1.0593, 1.5696], [1.4907, 1.0593, 1.5696]])
result = torch.logsumexp(out=out, keepdim=True, dim=1, input=input)
"""
)
obj.run(pytorch_code, ["result"])
28 changes: 28 additions & 0 deletions tests/test_lt.py
@@ -86,3 +86,31 @@ def test_case_6():
"""
)
obj.run(pytorch_code, ["result"])


# generated by validate_unittest autofix, based on test_case_5
def test_case_7():
pytorch_code = textwrap.dedent(
"""
import torch
input = torch.tensor([[1, 2], [3, 4]])
other = torch.tensor([[1, 2], [3, 4]])
out = torch.tensor([True, False])
result = torch.lt(input=input, other=other, out=out)
"""
)
obj.run(pytorch_code, ["result", "out"])


# generated by validate_unittest autofix, based on test_case_5
def test_case_8():
pytorch_code = textwrap.dedent(
"""
import torch
input = torch.tensor([[1, 2], [3, 4]])
other = torch.tensor([[1, 2], [3, 4]])
out = torch.tensor([True, False])
result = torch.lt(out=out, other=other, input=input)
"""
)
obj.run(pytorch_code, ["result", "out"])