Skip to content

Commit

Permalink
[PIR] Add rsqrt to new IR & op_test passed (4/4) (PaddlePaddle#57325)
Browse files Browse the repository at this point in the history
* change eager_op_test to op_test & delete new IR log

* refine code

* rsqrt new IR op_test passed (4/4)

* fix rename bug
  • Loading branch information
changeyoung98 authored Sep 15, 2023
1 parent 21b64ca commit bd5a814
Show file tree
Hide file tree
Showing 2 changed files with 8 additions and 5 deletions.
4 changes: 2 additions & 2 deletions python/paddle/tensor/ops.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@

from .. import _C_ops
from ..base.data_feeder import check_variable_and_dtype
from ..framework import LayerHelper, in_dynamic_mode
from ..framework import LayerHelper, in_dynamic_mode, in_dynamic_or_pir_mode
from .layer_function_generator import (
add_sample_code,
generate_activation_fn,
Expand Down Expand Up @@ -878,7 +878,7 @@ def rsqrt(x, name=None):
Tensor(shape=[4], dtype=float32, place=Place(cpu), stop_gradient=True,
[3.16227770, 2.23606801, 1.82574177, 1.58113885])
"""
if in_dynamic_mode():
if in_dynamic_or_pir_mode():
return _C_ops.rsqrt(x)
else:
check_variable_and_dtype(
Expand Down
9 changes: 6 additions & 3 deletions test/legacy_test/test_activation_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -1578,7 +1578,7 @@ def if_enable_cinn(self):
pass

def test_check_output(self):
self.check_output(check_prim=True)
self.check_output(check_prim=True, check_new_ir=True)

def test_check_grad(self):
if self.dtype == np.float16:
Expand All @@ -1588,6 +1588,7 @@ def test_check_grad(self):
'Out',
max_relative_error=0.0005,
check_prim=True,
check_new_ir=True,
)


Expand Down Expand Up @@ -4508,7 +4509,9 @@ def test_check_grad(self):
TestLeakyReluAlpha3, check_prim=True, enable_cinn=True
)
create_test_act_fp16_class(TestLeakyRelu_ZeroDim, check_prim=True)
create_test_act_fp16_class(TestRsqrt, check_prim=True, enable_cinn=True)
create_test_act_fp16_class(
TestRsqrt, check_prim=True, enable_cinn=True, check_new_ir=True
)


def create_test_act_bf16_class(
Expand Down Expand Up @@ -4631,7 +4634,7 @@ def test_check_grad(self):
create_test_act_bf16_class(TestLeakyReluAlpha2, check_prim=True)
create_test_act_bf16_class(TestLeakyReluAlpha3, check_prim=True)
create_test_act_bf16_class(TestLeakyRelu_ZeroDim, check_prim=True)
create_test_act_bf16_class(TestRsqrt, check_prim=True)
create_test_act_bf16_class(TestRsqrt, check_prim=True, check_new_ir=True)

if __name__ == "__main__":
unittest.main()

0 comments on commit bd5a814

Please sign in to comment.