【PIR API adaptor No.16、18】 Migrate paddle.atan, paddle.atanh into pir (
MarioLulab authored and zeroRains committed Nov 8, 2023
1 parent 327c4e6 commit 23611e4
Showing 2 changed files with 15 additions and 8 deletions.
4 changes: 2 additions & 2 deletions python/paddle/tensor/ops.py
@@ -430,7 +430,7 @@ def atan(x, name=None):
             Tensor(shape=[4], dtype=float32, place=Place(cpu), stop_gradient=True,
             [-0.38050640, -0.19739556, 0.09966865, 0.29145682])
     """
-    if in_dynamic_mode():
+    if in_dynamic_or_pir_mode():
         return _C_ops.atan(x)
     else:
         check_variable_and_dtype(
@@ -477,7 +477,7 @@ def atanh(x, name=None):
             Tensor(shape=[4], dtype=float32, place=Place(cpu), stop_gradient=True,
             [-0.42364895, -0.20273255, 0.10033534, 0.30951962])
     """
-    if in_dynamic_mode():
+    if in_dynamic_or_pir_mode():
        return _C_ops.atanh(x)
     else:
         check_variable_and_dtype(
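For reference, the change above only swaps the dispatch guard; the public API is unchanged. A minimal usage sketch follows (the input tensor [-0.4, -0.2, 0.1, 0.3] is inferred from the docstring outputs quoted in the hunks above, so treat it as an assumption):

import paddle

# Assumed input; it reproduces the docstring outputs shown in the diff above.
x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
print(paddle.atan(x))   # ~[-0.38050640, -0.19739556, 0.09966865, 0.29145682]
print(paddle.atanh(x))  # ~[-0.42364895, -0.20273255, 0.10033534, 0.30951962]

With in_dynamic_or_pir_mode(), both calls now take the _C_ops fast path in dynamic mode and under the new PIR program, instead of only in dynamic mode.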
19 changes: 13 additions & 6 deletions test/legacy_test/test_activation_op.py
@@ -852,11 +852,15 @@ def setUp(self):
         self.outputs = {'Out': out}
         self.convert_input_output()
 
+    def test_check_output(self):
+        self.check_output(check_pir=True)
+
     def test_check_grad(self):
         if self.dtype == np.float16:
             return
-        self.check_grad(['X'], 'Out')
+        self.check_grad(['X'], 'Out', check_pir=True)
 
+    @test_with_pir_api
     def test_out_name(self):
         with static_guard():
             with base.program_guard(base.Program()):
@@ -2297,10 +2301,13 @@ def setUp(self):
     def init_shape(self):
         self.shape = [10, 12]
 
+    def test_check_output(self):
+        self.check_output(check_pir=True)
+
     def test_check_grad(self):
         if self.dtype == np.float16:
             return
-        self.check_grad(['X'], 'Out')
+        self.check_grad(['X'], 'Out', check_pir=True)
 
 
 class TestAtanh_Complex64(TestAtanh):
@@ -4736,10 +4743,10 @@ def test_check_grad(self):
 create_test_act_fp16_class(TestSin, check_pir=True)
 create_test_act_fp16_class(TestSinh)
 create_test_act_fp16_class(TestAsin)
-create_test_act_fp16_class(TestAtan)
+create_test_act_fp16_class(TestAtan, check_pir=True)
 create_test_act_fp16_class(TestAcosh, check_pir=True)
 create_test_act_fp16_class(TestAsinh)
-create_test_act_fp16_class(TestAtanh)
+create_test_act_fp16_class(TestAtanh, check_pir=True)
 create_test_act_fp16_class(TestRound, grad_check=False, check_pir=True)
 create_test_act_fp16_class(
     TestRelu, check_prim=True, enable_cinn=True, check_pir=True
@@ -4890,10 +4897,10 @@ def test_check_grad(self):
 create_test_act_bf16_class(TestSin, check_pir=True)
 create_test_act_bf16_class(TestSinh)
 create_test_act_bf16_class(TestAsin)
-create_test_act_bf16_class(TestAtan)
+create_test_act_bf16_class(TestAtan, check_pir=True)
 create_test_act_bf16_class(TestAcosh, check_pir=True)
 create_test_act_bf16_class(TestAsinh)
-create_test_act_bf16_class(TestAtanh)
+create_test_act_bf16_class(TestAtanh, check_pir=True)
 create_test_act_bf16_class(TestRound, grad_check=False, check_pir=True)
 create_test_act_bf16_class(TestRelu, check_prim=True, check_pir=True)
 create_test_act_bf16_class(
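Assembled from the fragments above, a PIR-enabled activation test typically reads as follows. This is a minimal sketch, assuming the standard OpTest harness from test/legacy_test (the op_test module) and the usual op_type/python_api/inputs/outputs conventions; it is not a copy of the real TestAtan or TestAtanh classes.

import unittest

import numpy as np
import paddle
from op_test import OpTest  # assumed import path for the legacy_test harness


class TestAtanPIRSketch(OpTest):
    def setUp(self):
        self.op_type = "atan"
        self.python_api = paddle.atan  # Python API the checker runs in eager/PIR mode
        x = np.random.uniform(-1.0, 1.0, [10, 12]).astype(np.float64)
        self.inputs = {'X': x}
        self.outputs = {'Out': np.arctan(x)}

    def test_check_output(self):
        # check_pir=True additionally verifies the kernel under the new PIR program.
        self.check_output(check_pir=True)

    def test_check_grad(self):
        self.check_grad(['X'], 'Out', check_pir=True)


if __name__ == '__main__':
    unittest.main()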
