feat: new ir for fmax and fmin (#58596)
xiaoyewww authored Nov 3, 2023
1 parent 5dc7e01 commit 376a3eb
Showing 3 changed files with 30 additions and 18 deletions.
4 changes: 2 additions & 2 deletions python/paddle/tensor/math.py
@@ -1357,7 +1357,7 @@ def fmax(x, y, name=None):
Tensor(shape=[3], dtype=float32, place=Place(cpu), stop_gradient=True,
[5. , 3. , inf.])
"""
- if in_dynamic_mode():
+ if in_dynamic_or_pir_mode():
return _C_ops.fmax(x, y)
else:
return _elementwise_op(LayerHelper('elementwise_fmax', **locals()))
@@ -1421,7 +1421,7 @@ def fmin(x, y, name=None):
Tensor(shape=[3], dtype=float64, place=Place(cpu), stop_gradient=True,
[ 1. , -inf., 5. ])
"""
- if in_dynamic_mode():
+ if in_dynamic_or_pir_mode():
return _C_ops.fmin(x, y)
else:
return _elementwise_op(LayerHelper('elementwise_fmin', **locals()))
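With this change, `paddle.fmax` and `paddle.fmin` dispatch through `_C_ops` whenever `in_dynamic_or_pir_mode()` is true, i.e. in dynamic mode and under the new PIR static graph, and only fall back to the legacy `_elementwise_op` path for the old static graph. A minimal usage sketch of the NaN-aware semantics these ops provide (not part of this commit; the input values are illustrative):

import paddle

# fmax/fmin compare elementwise; when one operand is NaN, the other operand is returned.
x = paddle.to_tensor([1.0, float("nan"), 5.0])
y = paddle.to_tensor([3.0, 2.0, float("nan")])

print(paddle.fmax(x, y))  # [3., 2., 5.]
print(paddle.fmin(x, y))  # [1., 2., 5.]
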
22 changes: 14 additions & 8 deletions test/legacy_test/test_fmax_op.py
@@ -19,6 +19,7 @@

import paddle
from paddle.base import core
+ from paddle.pir_utils import test_with_pir_api


class ApiFMaxTest(unittest.TestCase):
@@ -43,6 +44,7 @@ def setUp(self):
self.np_expected3 = np.fmax(self.input_a, self.input_c)
self.np_expected4 = np.fmax(self.input_b, self.input_c)

+ @test_with_pir_api
def test_static_api(self):
"""test_static_api"""
paddle.enable_static()
@@ -145,11 +147,11 @@ def setUp(self):

def test_check_output(self):
"""test_check_output"""
- self.check_output()
+ self.check_output(check_pir=True)

def test_check_grad_normal(self):
"""test_check_grad_normal"""
- self.check_grad(['X', 'Y'], 'Out')
+ self.check_grad(['X', 'Y'], 'Out', check_pir=True)

def test_check_grad_ingore_x(self):
"""test_check_grad_ingore_x"""
@@ -158,6 +160,7 @@ def test_check_grad_ingore_x(self):
'Out',
max_relative_error=0.005,
no_grad_set=set("X"),
+ check_pir=True,
)

def test_check_grad_ingore_y(self):
@@ -167,6 +170,7 @@ def test_check_grad_ingore_y(self):
'Out',
max_relative_error=0.005,
no_grad_set=set('Y'),
+ check_pir=True,
)


@@ -190,11 +194,11 @@ def setUp(self):

def test_check_output(self):
"""test_check_output"""
- self.check_output()
+ self.check_output(check_pir=True)

def test_check_grad_normal(self):
"""test_check_grad_normal"""
- self.check_grad(['X', 'Y'], 'Out')
+ self.check_grad(['X', 'Y'], 'Out', check_pir=True)

def test_check_grad_ingore_x(self):
"""test_check_grad_ingore_x"""
@@ -203,6 +207,7 @@ def test_check_grad_ingore_x(self):
'Out',
max_relative_error=0.005,
no_grad_set=set("X"),
+ check_pir=True,
)

def test_check_grad_ingore_y(self):
@@ -212,6 +217,7 @@ def test_check_grad_ingore_y(self):
'Out',
max_relative_error=0.005,
no_grad_set=set('Y'),
+ check_pir=True,
)


@@ -234,11 +240,11 @@ def setUp(self):

def test_check_output(self):
"""test_check_output"""
- self.check_output()
+ self.check_output(check_pir=True)

def test_check_grad_normal(self):
"""test_check_grad_normal"""
- self.check_grad(['X', 'Y'], 'Out')
+ self.check_grad(['X', 'Y'], 'Out', check_pir=True)


@unittest.skipIf(
@@ -263,11 +269,11 @@ def setUp(self):

def test_check_output(self):
place = core.CUDAPlace(0)
- self.check_output_with_place(place)
+ self.check_output_with_place(place, check_pir=True)

def test_check_grad(self):
place = core.CUDAPlace(0)
- self.check_grad_with_place(place, ['X', 'Y'], 'Out')
+ self.check_grad_with_place(place, ['X', 'Y'], 'Out', check_pir=True)


if __name__ == "__main__":
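The `@test_with_pir_api` decorator imported above runs a decorated static-graph test under both the legacy program and the new PIR program. A standalone sketch of that pattern (illustrative only, not part of this commit; it assumes the decorator wraps a plain test method the same way it wraps `test_static_api` above):

import unittest

import numpy as np
import paddle
from paddle.pir_utils import test_with_pir_api


class FMaxStaticSketch(unittest.TestCase):
    # Illustrative class mirroring the ApiFMaxTest pattern; values are made up.
    @test_with_pir_api
    def test_static_fmax(self):
        paddle.enable_static()
        a = np.array([1.0, np.nan, 5.0], dtype="float32")
        b = np.array([3.0, 2.0, np.nan], dtype="float32")
        main = paddle.static.Program()
        with paddle.static.program_guard(main):
            x = paddle.static.data(name="x", shape=[3], dtype="float32")
            y = paddle.static.data(name="y", shape=[3], dtype="float32")
            out = paddle.fmax(x, y)
            exe = paddle.static.Executor(paddle.CPUPlace())
            (res,) = exe.run(main, feed={"x": a, "y": b}, fetch_list=[out])
        np.testing.assert_allclose(res, np.fmax(a, b))
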
22 changes: 14 additions & 8 deletions test/legacy_test/test_fmin_op.py
@@ -19,6 +19,7 @@

import paddle
from paddle.base import core
+ from paddle.pir_utils import test_with_pir_api

paddle.enable_static()

@@ -45,6 +46,7 @@ def setUp(self):
self.np_expected3 = np.fmin(self.input_a, self.input_c)
self.np_expected4 = np.fmin(self.input_b, self.input_c)

+ @test_with_pir_api
def test_static_api(self):
"""test_static_api"""
paddle.enable_static()
@@ -147,11 +149,11 @@ def setUp(self):

def test_check_output(self):
"""test_check_output"""
- self.check_output()
+ self.check_output(check_pir=True)

def test_check_grad_normal(self):
"""test_check_grad_normal"""
- self.check_grad(['X', 'Y'], 'Out')
+ self.check_grad(['X', 'Y'], 'Out', check_pir=True)

def test_check_grad_ingore_x(self):
"""test_check_grad_ingore_x"""
@@ -160,6 +162,7 @@ def test_check_grad_ingore_x(self):
'Out',
max_relative_error=0.005,
no_grad_set=set("X"),
+ check_pir=True,
)

def test_check_grad_ingore_y(self):
@@ -169,6 +172,7 @@ def test_check_grad_ingore_y(self):
'Out',
max_relative_error=0.005,
no_grad_set=set('Y'),
+ check_pir=True,
)


@@ -192,11 +196,11 @@ def setUp(self):

def test_check_output(self):
"""test_check_output"""
- self.check_output()
+ self.check_output(check_pir=True)

def test_check_grad_normal(self):
"""test_check_grad_normal"""
- self.check_grad(['X', 'Y'], 'Out')
+ self.check_grad(['X', 'Y'], 'Out', check_pir=True)

def test_check_grad_ingore_x(self):
"""test_check_grad_ingore_x"""
@@ -205,6 +209,7 @@ def test_check_grad_ingore_x(self):
'Out',
max_relative_error=0.005,
no_grad_set=set("X"),
+ check_pir=True,
)

def test_check_grad_ingore_y(self):
@@ -214,6 +219,7 @@ def test_check_grad_ingore_y(self):
'Out',
max_relative_error=0.005,
no_grad_set=set('Y'),
+ check_pir=True,
)


@@ -236,11 +242,11 @@ def setUp(self):

def test_check_output(self):
"""test_check_output"""
- self.check_output()
+ self.check_output(check_pir=True)

def test_check_grad_normal(self):
"""test_check_grad_normal"""
- self.check_grad(['X', 'Y'], 'Out')
+ self.check_grad(['X', 'Y'], 'Out', check_pir=True)


@unittest.skipIf(
@@ -265,11 +271,11 @@ def setUp(self):

def test_check_output(self):
place = core.CUDAPlace(0)
- self.check_output_with_place(place)
+ self.check_output_with_place(place, check_pir=True)

def test_check_grad(self):
place = core.CUDAPlace(0)
- self.check_grad_with_place(place, ['X', 'Y'], 'Out')
+ self.check_grad_with_place(place, ['X', 'Y'], 'Out', check_pir=True)


if __name__ == "__main__":
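The operator-level test classes in both files follow the standard OpTest contract, and the new check_pir=True flag asks the checker to additionally run the op through the new PIR executor. A minimal sketch of that pattern (illustrative only; the helper module path and the python_api attribute are assumptions, not taken from this diff):

import numpy as np
import paddle
from op_test import OpTest  # module name assumed; the helper used by tests under legacy_test


class TestElementwiseFmaxSketch(OpTest):
    # Illustrative class, not part of the commit.
    def setUp(self):
        self.op_type = "elementwise_fmax"
        self.python_api = paddle.fmax  # assumed to be needed for the dygraph/PIR checks
        x = np.random.uniform(0.1, 1.0, [13, 17]).astype("float64")
        y = np.random.uniform(0.1, 1.0, [13, 17]).astype("float64")
        self.inputs = {'X': x, 'Y': y}
        self.outputs = {'Out': np.fmax(x, y)}

    def test_check_output(self):
        # check_pir=True also verifies the output under the PIR program.
        self.check_output(check_pir=True)

    def test_check_grad(self):
        self.check_grad(['X', 'Y'], 'Out', check_pir=True)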