1 change: 0 additions & 1 deletion test/legacy_test/ernie_utils/moe_all_gather_layer.py
@@ -89,7 +89,6 @@ def __init__(
group_experts=False,
moe_statics=None,
):

super().__init__(
gate,
experts,
1 change: 1 addition & 0 deletions test/legacy_test/ernie_utils/moe_layer.py
@@ -19,6 +19,7 @@
Returns:
_type_: _description_
"""

from __future__ import annotations

import logging
6 changes: 0 additions & 6 deletions test/legacy_test/hygon_dcu/hygon_llama_ops.py
@@ -401,7 +401,6 @@ def test_check_gradient(self):

# test mean op
class TestFP16MeanOp(OpTest):

def setUp(self):
self.op_type = "mean"
self.python_api = paddle.mean
@@ -441,7 +440,6 @@ def test_checkout_grad(self):

# test scale op
class TestScaleFp16Op(OpTest):

def setUp(self):
self.op_type = "scale"
self.python_api = paddle.scale
@@ -466,7 +464,6 @@ def test_check_grad(self):

# test sum op
class TestAFP16SumOp(OpTest):

def setUp(self):
self.op_type = "sum"
self.python_api = paddle.add_n
@@ -528,7 +525,6 @@ def test_check_output(self):

# test add, add_grad op
class TestFP16ElementwiseAddOp(OpTest):

def setUp(self):
self.op_type = "elementwise_add"
self.python_api = paddle.add
@@ -612,7 +608,6 @@ def test_check_grad_ignore_y(self):

# test multiply, multiply_grad op
class TestElementwiseMulOpFp16(OpTest):

def setUp(self):
self.op_type = "elementwise_mul"
self.prim_op_type = "prim"
@@ -791,7 +786,6 @@ def test_check_output(self):

# test matmul, matmul_grad op
class TestMatMulV2Op(OpTest):

def config(self):
self.x_shape = (100,)
self.y_shape = (100,)
8 changes: 0 additions & 8 deletions test/legacy_test/test_uniform_random_op.py
@@ -206,7 +206,6 @@ def init_dtype(self):


class TestUniformRandomOpError(unittest.TestCase):

def test_errors(self):
paddle.enable_static()
main_prog = Program()
@@ -305,7 +304,6 @@ def check_with_place(self, place):


class TestUniformRandomOpApi(unittest.TestCase):

def test_api(self):
paddle.enable_static()
paddle.seed(10)
@@ -343,7 +341,6 @@ def test_api(self):


class TestUniformRandomOp_attr_tensor_API(unittest.TestCase):

def test_attr_tensor_API(self):
paddle.enable_static()
startup_program = base.Program()
@@ -402,7 +399,6 @@ def test_attr_tensor_int32_API(self):


class TestUniformRandomOp_API_seed(unittest.TestCase):

def test_attr_tensor_API(self):
paddle.enable_static()
_seed = 10
@@ -456,7 +452,6 @@ def check_with_place(self, place):


class TestUniformRandomOpSelectedRowsShapeTensorList(unittest.TestCase):

def test_check_output(self):
for place in get_places():
self.check_with_place(place)
@@ -493,7 +488,6 @@ def test_check_output(self):


class TestUniformRandomBatchSizeLikeOpError(unittest.TestCase):

def test_errors(self):
paddle.enable_static()
main_prog = Program()
@@ -527,7 +521,6 @@ def test_dtype():


class TestUniformAlias(unittest.TestCase):

def test_alias(self):
paddle.uniform([2, 3], min=-5.0, max=5.0)
paddle.tensor.uniform([2, 3], min=-5.0, max=5.0)
@@ -540,7 +533,6 @@ def test_uniform_random():


class TestUniformOpError(unittest.TestCase):

def test_errors(self):
paddle.enable_static()
main_prog = Program()
1 change: 0 additions & 1 deletion test/legacy_test/test_unpool1d_op.py
@@ -179,7 +179,6 @@ def test_case(self):


class TestUnpool1DOpAPI_static(unittest.TestCase):

def test_case(self):
paddle.enable_static()
for place in get_places():
1 change: 0 additions & 1 deletion test/legacy_test/test_unpool3d_op.py
@@ -410,7 +410,6 @@ def test_case(self):


class TestUnpool3DOpAPI_static(unittest.TestCase):

def test_case(self):
paddle.enable_static()
for place in get_places():
3 changes: 0 additions & 3 deletions test/legacy_test/test_unpool_indices.py
@@ -377,7 +377,6 @@ def test_case(self):


class TestUnpool1DAPI_st(unittest.TestCase):

def test_case(self):
paddle.enable_static()
for place in get_places():
@@ -455,7 +454,6 @@ def test_case(self):


class TestUnpool2DAPI_st(unittest.TestCase):

def test_case(self):
paddle.enable_static()
for place in get_places():
@@ -542,7 +540,6 @@ def test_case(self):


class TestUnpool3DAPI_st2(unittest.TestCase):

def test_case(self):
paddle.enable_static()
for place in get_places():
1 change: 0 additions & 1 deletion test/legacy_test/test_unpool_op.py
@@ -456,7 +456,6 @@ def test_case(self):


class TestUnpoolOpAPI_st(unittest.TestCase):

def test_case(self):
import paddle
import paddle.nn.functional as F
1 change: 0 additions & 1 deletion test/legacy_test/test_unstack_op.py
@@ -236,7 +236,6 @@ def test_check_grad(self):


class TestUnstackZeroInputOp(unittest.TestCase):

def unstack_zero_input_static(self):
paddle.enable_static()

1 change: 0 additions & 1 deletion test/legacy_test/test_variance_layer.py
@@ -116,7 +116,6 @@ def test_alias(self):


class TestVarError(unittest.TestCase):

def test_error(self):
with paddle.static.program_guard(paddle.static.Program()):
x = paddle.static.data('X', [2, 3, 4], 'int32')
1 change: 0 additions & 1 deletion test/legacy_test/test_warpctc_op.py
@@ -528,7 +528,6 @@ def test_check_grad(self):


class TestWarpCTCOpError(unittest.TestCase):

def test_errors(self):
paddle.enable_static()
main_program = paddle.static.Program()
1 change: 0 additions & 1 deletion test/legacy_test/test_warprnnt_op.py
@@ -249,7 +249,6 @@ def test_check_grad(self):


class TestWarpRNNTOpError(unittest.TestCase):

def test_errors1(self):
with paddle.static.program_guard(
paddle.static.Program(), paddle.static.Program()
1 change: 0 additions & 1 deletion test/legacy_test/test_while_loop_op.py
@@ -178,7 +178,6 @@ def body(i, ten, test_dict, test_list, test_list_dict):


class TestApiWhileLoop_Nested(unittest.TestCase):

@compare_legacy_with_pt
def test_nested_net(self):
def external_cond(i, j, init, sums):
1 change: 0 additions & 1 deletion test/legacy_test/test_while_op.py
@@ -154,7 +154,6 @@ def test_bad_x():


class TestIgnoreVarNameInWhile(unittest.TestCase):

def test_ignore_var(self):
def cond(i, ten, temp, y):
return i < ten
1 change: 0 additions & 1 deletion test/legacy_test/test_yolo_box_op.py
@@ -268,7 +268,6 @@ def test_dygraph(self):


class TestYoloBoxStatic(unittest.TestCase):

def test_static(self):
x1 = paddle.static.data('x1', [2, 14, 8, 8], 'float32')
img_size = paddle.static.data('img_size', [2, 2], 'int32')
1 change: 0 additions & 1 deletion test/legacy_test/test_yolov3_loss_op.py
@@ -440,7 +440,6 @@ def test_dygraph(self):


class TestYolov3LossStatic(unittest.TestCase):

def test_static(self):
x = paddle.static.data('x', [2, 14, 8, 8], 'float32')
gt_box = paddle.static.data('gt_box', [2, 10, 4], 'float32')
1 change: 1 addition & 0 deletions test/quantization/test_trace_quanter.py
@@ -12,6 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
"""The quantizer layers should be traced by paddle.jit.save function."""

import os
import tempfile
import unittest
1 change: 0 additions & 1 deletion test/quantization/test_weight_only_linear.py
@@ -932,7 +932,6 @@ def test_weightonly_linear_backward(
"quantized_matmul requires CUDA >= 11.2 and CUDA_ARCH >= 8",
)
class WeightOnlyLinear_stream_k_TestCase(unittest.TestCase):

def test_weightonly_linear_backward_int4(self):
def test_weightonly_linear_backward(
self, algo='weight_only_int4', weight_dtype='int4'
1 change: 0 additions & 1 deletion test/sequence/test_sequence_mask.py
@@ -185,7 +185,6 @@ def initParameters(self):


class TestSequenceMaskOpError(unittest.TestCase):

def test_errors(self):
paddle.enable_static()
with paddle.static.program_guard(
1 change: 0 additions & 1 deletion test/sot/test_guard_fastpath_strategy.py
@@ -45,7 +45,6 @@ def test_guard(self):
# subsequent guard checks will be skipped to improve performance.
# The related logic is implemented in the OpcodeExecutorCache class.
with EnvironmentVariableGuard(ENV_SOT_UNSAFE_CACHE_FASTPATH, True):

self.assertTrue(ENV_SOT_UNSAFE_CACHE_FASTPATH.get())

self.assertFalse(