Skip to content

Commit

Permalink
xpu-paddlepaddle-33 [任务] matmul单测 timeout
Browse files Browse the repository at this point in the history
test=kunlun
  • Loading branch information
taixiurong committed Jul 15, 2022
1 parent ec38be6 commit 22ad13f
Show file tree
Hide file tree
Showing 4 changed files with 28 additions and 2 deletions.
4 changes: 3 additions & 1 deletion python/paddle/fluid/tests/unittests/xpu/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -24,5 +24,7 @@ foreach(TEST_OP ${DIST_TEST_OPS})
py_test_modules(${TEST_OP} MODULES ${TEST_OP})
endforeach()

set_tests_properties(test_mul_op_xpu PROPERTIES TIMEOUT 120)
set_tests_properties(test_conv2d_op_xpu PROPERTIES TIMEOUT 120)
set_tests_properties(test_mul_op_xpu PROPERTIES TIMEOUT 120)
set_tests_properties(test_matmul_v2_op_xpu PROPERTIES TIMEOUT 900)
set_tests_properties(test_matmul_op_xpu PROPERTIES TIMEOUT 300)
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,6 @@
xpu_test_op_type_white_list = [
'dropout_float16',
'dropout_grad_float16',
'matmul_v2_float16',
"grad_add_float32" # no api for grad_add, skip
]
xpu_test_device_op_white_list = []
Expand Down
20 changes: 20 additions & 0 deletions python/paddle/fluid/tests/unittests/xpu/test_matmul_op_xpu.py
Original file line number Diff line number Diff line change
Expand Up @@ -294,6 +294,10 @@ def setUp(self):
self.op_type = "matmul"
self.dtype = np.float32 if not hasattr(self,
'in_type') else self.in_type

self.__class__.no_need_check_grad = False if not hasattr(
self, 'no_need_check_grad') else self.no_need_check_grad

shape_X = [4, 5] if not hasattr(self, 'shape_X') else self.shape_X
shape_Y = [5, 6] if not hasattr(self, 'shape_Y') else self.shape_Y
transpose_X = False if not hasattr(self,
Expand All @@ -314,19 +318,31 @@ def test_check_output(self):
self.check_output_with_place(place, atol=1e-3)

def test_check_grad_normal(self):
if hasattr(self.__class__, "no_need_check_grad"
) and self.__class__.no_need_check_grad == True:
return

place = paddle.XPUPlace(0)
self.check_grad_with_place(place, ['X', 'Y'],
'Out',
max_relative_error=5e-2)

def test_check_grad_ignore_x(self):
if hasattr(self.__class__, "no_need_check_grad"
) and self.__class__.no_need_check_grad == True:
return

place = paddle.XPUPlace(0)
self.check_grad_with_place(place, ['Y'],
'Out',
max_relative_error=5e-2,
no_grad_set=set("X"))

def test_check_grad_ignore_y(self):
if hasattr(self.__class__, "no_need_check_grad"
) and self.__class__.no_need_check_grad == True:
return

place = paddle.XPUPlace(0)
self.check_grad_with_place(place, ['X'],
'Out',
Expand All @@ -351,6 +367,9 @@ def dynamic_create_class(self):
for transose_x in [True, False]:
for transose_y in [True, False]:
for batch in batch_size:
no_need_check_grad = False
if batch >= 5:
no_need_check_grad = True
class_name = (
'TestMatMulOp_dimX_{}_dim_Y_{}_transX_{}_transY_{}_batch_{}'
.format(dim_X, dim_Y, transose_x, transose_y,
Expand All @@ -362,6 +381,7 @@ def dynamic_create_class(self):
'shape_Y': shape_y,
'transpose_X': transose_x,
'transpose_Y': transose_y,
'no_need_check_grad': no_need_check_grad,
'op_type': "matmul"
}
classes.append([class_name, attr_dict])
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,8 @@ def setUp(self):
self.dtype = self.in_type
self.config()
self.op_type = "matmul_v2"
if self.dtype == np.float16 or self.dtype == "float16":
self.__class__.no_need_check_grad = True
x = np.random.random(self.x_shape).astype(self.dtype)
y = np.random.random(self.y_shape).astype(self.dtype)
# -0.1 ~ 0.1
Expand All @@ -99,6 +101,9 @@ def test_check_output(self):
self.check_output_with_place(place)

def test_check_grad(self):
if hasattr(self.__class__, "no_need_check_grad"
) and self.__class__.no_need_check_grad == True:
return
place = paddle.XPUPlace(0)
self.check_grad_with_place(place, ['X', 'Y'], 'Out')

Expand Down

0 comments on commit 22ad13f

Please sign in to comment.