Skip to content

Commit

Permalink
【PIR API adaptor No.170, No.176】Migrate prior_box/reciprocal into pir (PaddlePaddle#59042)
Browse files Browse the repository at this point in the history

* Migrate prior_box/reciprocal into pir

* fix

* fix

* fix bug

* fix codestyle
  • Loading branch information
ccsuzzh authored and SecretXV committed Nov 28, 2023
1 parent 8634295 commit 4a623ef
Show file tree
Hide file tree
Showing 4 changed files with 10 additions and 8 deletions.
4 changes: 2 additions & 2 deletions python/paddle/tensor/ops.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@

from .. import _C_ops
from ..base.data_feeder import check_variable_and_dtype
from ..framework import LayerHelper, in_dynamic_mode, in_dynamic_or_pir_mode
from ..framework import LayerHelper, in_dynamic_or_pir_mode
from .layer_function_generator import (
add_sample_code,
generate_activation_fn,
Expand Down Expand Up @@ -792,7 +792,7 @@ def reciprocal(x, name=None):
Tensor(shape=[4], dtype=float32, place=Place(cpu), stop_gradient=True,
[-2.50000000, -5. , 10. , 3.33333325])
"""
if in_dynamic_mode():
if in_dynamic_or_pir_mode():
return _C_ops.reciprocal(x)
else:
check_variable_and_dtype(
Expand Down
2 changes: 1 addition & 1 deletion python/paddle/vision/ops.py
Original file line number Diff line number Diff line change
Expand Up @@ -510,7 +510,7 @@ def _is_list_or_tuple_(data):
max_sizes = [max_sizes]
cur_max_sizes = max_sizes

if in_dygraph_mode():
if in_dynamic_or_pir_mode():
step_w, step_h = steps
if max_sizes is None:
max_sizes = []
Expand Down
8 changes: 4 additions & 4 deletions test/legacy_test/test_activation_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -3383,10 +3383,10 @@ def setUp(self):
def test_check_grad(self):
if self.dtype == np.float16:
return
self.check_grad(['X'], 'Out', max_relative_error=0.01)
self.check_grad(['X'], 'Out', max_relative_error=0.01, check_pir=True)

def test_check_output(self):
self.check_output()
self.check_output(check_pir=True)


class TestReciprocal_ZeroDim(TestReciprocal):
Expand Down Expand Up @@ -4876,7 +4876,7 @@ def test_check_grad(self):
create_test_act_fp16_class(TestSoftRelu, check_dygraph=False)
create_test_act_fp16_class(TestELU)
create_test_act_fp16_class(TestCELU)
create_test_act_fp16_class(TestReciprocal)
create_test_act_fp16_class(TestReciprocal, check_pir=True)
create_test_act_fp16_class(TestLog, check_prim=True, check_pir=True)
if core.is_compiled_with_rocm():
create_test_act_fp16_class(TestLog2, check_pir=True)
Expand Down Expand Up @@ -5031,7 +5031,7 @@ def test_check_grad(self):
create_test_act_bf16_class(TestSoftRelu, check_dygraph=False)
create_test_act_bf16_class(TestELU)
create_test_act_bf16_class(TestCELU)
create_test_act_bf16_class(TestReciprocal)
create_test_act_bf16_class(TestReciprocal, check_pir=True)
create_test_act_bf16_class(TestLog, check_prim=True, check_pir=True)
if core.is_compiled_with_rocm():
create_test_act_bf16_class(TestLog2, check_pir=True)
Expand Down
4 changes: 3 additions & 1 deletion test/legacy_test/test_prior_box_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
from op_test import OpTest

import paddle
from paddle.pir_utils import test_with_pir_api


def python_prior_box(
Expand Down Expand Up @@ -76,7 +77,7 @@ def set_data(self):
self.outputs = {'Boxes': self.out_boxes, 'Variances': self.out_var}

def test_check_output(self):
self.check_output()
self.check_output(check_pir=True)

def setUp(self):
self.op_type = "prior_box"
Expand Down Expand Up @@ -231,6 +232,7 @@ def setUp(self):
self.image_np = np.random.rand(2, 10, 40, 40).astype('float32')
self.min_sizes = [2.0, 4.0]

@test_with_pir_api
def test_dygraph_with_static(self):
paddle.enable_static()
input = paddle.static.data(
Expand Down

0 comments on commit 4a623ef

Please sign in to comment.