diff --git a/python/paddle/tensor/ops.py b/python/paddle/tensor/ops.py
index eae4afabb6357..54bceddab1b0c 100644
--- a/python/paddle/tensor/ops.py
+++ b/python/paddle/tensor/ops.py
@@ -17,7 +17,7 @@
 
 from .. import _C_ops
 from ..base.data_feeder import check_variable_and_dtype
-from ..framework import LayerHelper, in_dynamic_mode, in_dynamic_or_pir_mode
+from ..framework import LayerHelper, in_dynamic_or_pir_mode
 from .layer_function_generator import (
     add_sample_code,
     generate_activation_fn,
@@ -792,7 +792,7 @@ def reciprocal(x, name=None):
             Tensor(shape=[4], dtype=float32, place=Place(cpu), stop_gradient=True,
             [-2.50000000, -5.        , 10.       ,  3.33333325])
     """
-    if in_dynamic_mode():
+    if in_dynamic_or_pir_mode():
         return _C_ops.reciprocal(x)
     else:
         check_variable_and_dtype(
diff --git a/python/paddle/vision/ops.py b/python/paddle/vision/ops.py
index a0edbe561a08e..b6106ef827f18 100755
--- a/python/paddle/vision/ops.py
+++ b/python/paddle/vision/ops.py
@@ -510,7 +510,7 @@ def _is_list_or_tuple_(data):
             max_sizes = [max_sizes]
         cur_max_sizes = max_sizes
 
-    if in_dygraph_mode():
+    if in_dynamic_or_pir_mode():
         step_w, step_h = steps
         if max_sizes is None:
             max_sizes = []
diff --git a/test/legacy_test/test_activation_op.py b/test/legacy_test/test_activation_op.py
index ace22b1388040..d4d120dc2696e 100644
--- a/test/legacy_test/test_activation_op.py
+++ b/test/legacy_test/test_activation_op.py
@@ -3383,10 +3383,10 @@ def setUp(self):
     def test_check_grad(self):
         if self.dtype == np.float16:
             return
-        self.check_grad(['X'], 'Out', max_relative_error=0.01)
+        self.check_grad(['X'], 'Out', max_relative_error=0.01, check_pir=True)
 
     def test_check_output(self):
-        self.check_output()
+        self.check_output(check_pir=True)
 
 
 class TestReciprocal_ZeroDim(TestReciprocal):
@@ -4876,7 +4876,7 @@ def test_check_grad(self):
 create_test_act_fp16_class(TestSoftRelu, check_dygraph=False)
 create_test_act_fp16_class(TestELU)
 create_test_act_fp16_class(TestCELU)
-create_test_act_fp16_class(TestReciprocal)
+create_test_act_fp16_class(TestReciprocal, check_pir=True)
 create_test_act_fp16_class(TestLog, check_prim=True, check_pir=True)
 if core.is_compiled_with_rocm():
     create_test_act_fp16_class(TestLog2, check_pir=True)
@@ -5031,7 +5031,7 @@ def test_check_grad(self):
 create_test_act_bf16_class(TestSoftRelu, check_dygraph=False)
 create_test_act_bf16_class(TestELU)
 create_test_act_bf16_class(TestCELU)
-create_test_act_bf16_class(TestReciprocal)
+create_test_act_bf16_class(TestReciprocal, check_pir=True)
 create_test_act_bf16_class(TestLog, check_prim=True, check_pir=True)
 if core.is_compiled_with_rocm():
     create_test_act_bf16_class(TestLog2, check_pir=True)
diff --git a/test/legacy_test/test_prior_box_op.py b/test/legacy_test/test_prior_box_op.py
index 733219b326a12..4c23b3b5d5037 100644
--- a/test/legacy_test/test_prior_box_op.py
+++ b/test/legacy_test/test_prior_box_op.py
@@ -19,6 +19,7 @@
 from op_test import OpTest
 
 import paddle
+from paddle.pir_utils import test_with_pir_api
 
 
 def python_prior_box(
@@ -76,7 +77,7 @@ def set_data(self):
         self.outputs = {'Boxes': self.out_boxes, 'Variances': self.out_var}
 
     def test_check_output(self):
-        self.check_output()
+        self.check_output(check_pir=True)
 
     def setUp(self):
         self.op_type = "prior_box"
@@ -231,6 +232,7 @@ def setUp(self):
         self.image_np = np.random.rand(2, 10, 40, 40).astype('float32')
         self.min_sizes = [2.0, 4.0]
 
+    @test_with_pir_api
     def test_dygraph_with_static(self):
         paddle.enable_static()
         input = paddle.static.data(