[PaddlePaddle Hackathon] add paddle.nn.ClipGradByGlobalNorm unit test #277

Merged · 41 commits · Oct 29, 2021

Commits
5d6082b
submit paddle.nn.PixelShuffle unit test
justld Oct 10, 2021
2be3336
submit paddle.nn.PixelShuffle unit test case
justld Oct 10, 2021
cd161f4
add test of paddle.nn.ClipGradByGlobalNorm
justld Oct 10, 2021
b1af939
add test paddle.nn.ClipGradByNorm
justld Oct 10, 2021
151bdb8
add test of paddle.nn.PixelShuffle
justld Oct 10, 2021
fa2c79e
add test of paddle.nn.ClipGradByGlobalNorm and paddle.nn.ClipGradByNorm
justld Oct 10, 2021
7736df7
remove useless obj and class in test_clip_grad_by_global_norm.py and …
justld Oct 11, 2021
710ef80
Merge branch 'develop' into develop
justld Oct 11, 2021
d315fde
add test of paddle.nn.UpsamplingBilinear2D
justld Oct 11, 2021
0613680
remove unused code in test_clip_grad_by_global_norm.py
justld Oct 11, 2021
b7ce3bd
remove unused code in test_clip_grad_by_norm.py
justld Oct 11, 2021
078620c
add code annotation in test_clip_grad_by_global_norm.py
justld Oct 12, 2021
eafb154
add code annotation in test_clip_grad_by_norm.py
justld Oct 12, 2021
1978407
add code annotation in test_pixel_shuffle.py
justld Oct 12, 2021
143384b
add annotation in test_upsampling_bilinear2D.py
justld Oct 12, 2021
9d028b3
Merge branch 'develop' into develop
justld Oct 12, 2021
6a61fef
Merge branch 'PaddlePaddle:develop' into develop
justld Oct 13, 2021
3498837
add paddle.ClipGradByGlobalNorm test case
justld Oct 13, 2021
6563226
Merge branch 'develop' of github.com:justld/PaddleTest into develop
justld Oct 13, 2021
dd88b27
add paddle.nn.ClipGradByNorm test case
justld Oct 13, 2021
57b2dab
add paddle.nn.PixelShuffle test case
justld Oct 13, 2021
7667896
add paddle.nn.UpsamplingBilinear2D test case
justld Oct 13, 2021
54749f1
Merge branch 'develop' into develop
justld Oct 13, 2021
4042681
fix bug in test_clip_grad_by_norm.py
justld Oct 13, 2021
946fc10
Merge branch 'develop' of github.com:justld/PaddleTest into develop
justld Oct 13, 2021
fc0b49d
remove 3 test cases
justld Oct 13, 2021
2c12d1e
fix annotation
justld Oct 14, 2021
19edbf7
Merge branch 'develop' into develop
justld Oct 14, 2021
57b4e8e
Merge branch 'develop' into develop
justld Oct 14, 2021
df80e6f
Merge branch 'PaddlePaddle:develop' into develop
justld Oct 15, 2021
98f5d30
refine exception raise code
justld Oct 15, 2021
ddfebee
Merge branch 'develop' into develop
justld Oct 16, 2021
3a2f853
Merge branch 'develop' into develop
justld Oct 18, 2021
e25f69c
Merge branch 'develop' into develop
justld Oct 19, 2021
655823b
Merge branch 'develop' into develop
DDDivano Oct 22, 2021
e9ee331
Merge branch 'develop' into develop
DDDivano Oct 22, 2021
1974736
Merge branch 'develop' into develop
DDDivano Oct 25, 2021
f27bedc
add test of paddle.nn.ClipGradByGlobalNorm
justld Oct 25, 2021
fa0d66e
Merge branch 'develop' into test_ClipGradByGlobalNorm
justld Oct 25, 2021
ef658a9
add test case of paddle.nn.ClipGradByGlobalNorm
justld Oct 25, 2021
a0b6154
Merge branch 'develop' into test_ClipGradByGlobalNorm
DDDivano Oct 29, 2021
327 changes: 327 additions & 0 deletions framework/api/nn/test_clip_grad_by_global_norm.py
@@ -0,0 +1,327 @@
#!/bin/env python
# -*- coding: utf-8 -*-
# encoding=utf-8 vi:ts=4:sw=4:expandtab:ft=python
"""
test_clip_grad_by_global_norm
"""

from apibase import randtool, compare
import paddle
import pytest
import numpy as np


def numpy_clip_grad_by_global_norm(test_data, clip_norm):
"""
ClipGradByGlobalNorm implemented by numpy.
"""
cliped_data = []
grad_data = []
    for _, grad in test_data:
        grad_data.append(grad)
global_norm = np.sqrt(np.sum(np.square(np.array(grad_data))))
if global_norm > clip_norm:
for data, grad in test_data:
grad = grad * clip_norm / global_norm
cliped_data.append((data, grad))
else:
cliped_data = test_data
return cliped_data


def generate_test_data(length, shape, dtype="float32", value=10):
"""
generate test data
"""
tensor_data = []
numpy_data = []
np.random.seed(100)
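    # fixed seed: numpy_data and tensor_data are built from the same random
    # draws, so the numpy reference and the paddle result are directly comparable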
for i in range(length):
np_weight = randtool("float", -value, value, shape).astype(dtype)
np_weight_grad = randtool("float", -value, value, shape).astype(dtype)
numpy_data.append((np_weight, np_weight_grad))

tensor_weight = paddle.to_tensor(np_weight)
tensor_weight_grad = paddle.to_tensor(np_weight_grad)
tensor_data.append((tensor_weight, tensor_weight_grad))
return numpy_data, tensor_data


@pytest.mark.api_nn_ClipGradByGlobalNorm_vartype
def test_clip_grad_by_global_norm_base():
"""
Test base.

Test base config:
input grad shape = [10, 10]
input grad number = 4
input data dtype = 'float32'
clip_norm = 1.0
value range: [-10, 10]

Expected Results:
The output of ClipGradByGlobalNorm implemented by numpy and paddle should be equal.
"""
shape = [10, 10]
length = 4
clip_norm = 1.0
dtype = "float32"
np_data, paddle_data = generate_test_data(length, shape, dtype=dtype, value=10)
np_res = numpy_clip_grad_by_global_norm(np_data, clip_norm=clip_norm)

paddle_clip = paddle.nn.ClipGradByGlobalNorm(clip_norm=clip_norm)
paddle_cliped_data = paddle_clip(paddle_data)
paddle_res = []
for w, g in paddle_cliped_data:
paddle_res.append((w.numpy(), g.numpy()))

# compare grad value computed by numpy and paddle
for res, p_res in zip(np_res, paddle_res):
compare(res[1], p_res[1])


@pytest.mark.api_nn_ClipGradByGlobalNorm_parameters
def test_clip_grad_by_global_norm1():
"""
Test ClipGradByGlobalNorm when input shape changes.

Test base config:
input grad shape = [10, 10]
input grad number = 4
input data dtype = 'float32'
clip_norm = 1.0
value range: [-10, 10]

Changes:
input grad shape: [10, 10] -> [9, 13, 11]

Expected Results:
The output of ClipGradByGlobalNorm implemented by numpy and paddle should be equal.
"""
shape = [9, 13, 11]
length = 4
clip_norm = 1.0
dtype = "float32"
np_data, paddle_data = generate_test_data(length, shape, dtype=dtype, value=10)
np_res = numpy_clip_grad_by_global_norm(np_data, clip_norm=clip_norm)

paddle_clip = paddle.nn.ClipGradByGlobalNorm(clip_norm=clip_norm)
paddle_cliped_data = paddle_clip(paddle_data)
paddle_res = []
for w, g in paddle_cliped_data:
paddle_res.append((w.numpy(), g.numpy()))

# compare grad value computed by numpy and paddle
for res, p_res in zip(np_res, paddle_res):
compare(res[1], p_res[1])


@pytest.mark.api_nn_ClipGradByGlobalNorm_parameters
def test_clip_grad_by_global_norm2():
"""
Test ClipGradByGlobalNorm when input shape changes.

Test base config:
input grad shape = [10, 10]
input grad number = 4
input data dtype = 'float32'
clip_norm = 1.0
value range: [-10, 10]

Changes:
input grad shape: [10, 10] -> [10]

Expected Results:
The output of ClipGradByGlobalNorm implemented by numpy and paddle should be equal.
"""
shape = [10]
length = 4
clip_norm = 1.0
dtype = "float32"
np_data, paddle_data = generate_test_data(length, shape, dtype=dtype, value=10)
np_res = numpy_clip_grad_by_global_norm(np_data, clip_norm=clip_norm)

paddle_clip = paddle.nn.ClipGradByGlobalNorm(clip_norm=clip_norm)
paddle_cliped_data = paddle_clip(paddle_data)
paddle_res = []
for w, g in paddle_cliped_data:
paddle_res.append((w.numpy(), g.numpy()))

# compare grad value computed by numpy and paddle
for res, p_res in zip(np_res, paddle_res):
compare(res[1], p_res[1])


@pytest.mark.api_nn_ClipGradByGlobalNorm_parameters
def test_clip_grad_by_global_norm3():
"""
Test ClipGradByGlobalNorm when clip_norm changes.

Test base config:
input grad shape = [10, 10]
input grad number = 4
input data dtype = 'float32'
clip_norm = 1.0
value range: [-10, 10]

Changes:
clip_norm: 1.0 -> -1.0

Expected Results:
The output of ClipGradByGlobalNorm implemented by numpy and paddle should be equal.
"""
shape = [10, 10]
length = 4
clip_norm = -1.0
dtype = "float32"
np_data, paddle_data = generate_test_data(length, shape, dtype=dtype, value=10)
np_res = numpy_clip_grad_by_global_norm(np_data, clip_norm=clip_norm)

paddle_clip = paddle.nn.ClipGradByGlobalNorm(clip_norm=clip_norm)
paddle_cliped_data = paddle_clip(paddle_data)
paddle_res = []
for w, g in paddle_cliped_data:
paddle_res.append((w.numpy(), g.numpy()))

# compare grad value computed by numpy and paddle
for res, p_res in zip(np_res, paddle_res):
compare(res[1], p_res[1])


@pytest.mark.api_nn_ClipGradByGlobalNorm_parameters
def test_clip_grad_by_global_norm4():
"""
    Test ClipGradByGlobalNorm when group_name is set.

Test base config:
input grad shape = [10, 10]
input grad number = 4
input data dtype = 'float32'
clip_norm = 1.0
value range: [-10, 10]

Changes:
group_name: 'test_group'

Expected Results:
The output of ClipGradByGlobalNorm implemented by numpy and paddle should be equal.
"""
shape = [10, 10]
length = 4
clip_norm = 1.0
dtype = "float32"
np_data, paddle_data = generate_test_data(length, shape, dtype=dtype, value=10)
np_res = numpy_clip_grad_by_global_norm(np_data, clip_norm=clip_norm)

paddle_clip = paddle.nn.ClipGradByGlobalNorm(clip_norm=clip_norm, group_name="test_group")
paddle_cliped_data = paddle_clip(paddle_data)
paddle_res = []
for w, g in paddle_cliped_data:
paddle_res.append((w.numpy(), g.numpy()))

# compare grad value computed by numpy and paddle
for res, p_res in zip(np_res, paddle_res):
compare(res[1], p_res[1])


@pytest.mark.api_nn_ClipGradByGlobalNorm_parameters
def test_clip_grad_by_global_norm5():
"""
Test ClipGradByGlobalNorm when value range changes.

Test base config:
input grad shape = [10, 10]
input grad number = 4
input data dtype = 'float32'
clip_norm = 1.0
value range: [-10, 10]

Changes:
value range: [-10, 10] -> [-255555, 255555]

Expected Results:
The output of ClipGradByGlobalNorm implemented by numpy and paddle should be equal.
"""
shape = [10, 10]
length = 4
clip_norm = 1.0
dtype = "float32"
np_data, paddle_data = generate_test_data(length, shape, dtype=dtype, value=255555)
np_res = numpy_clip_grad_by_global_norm(np_data, clip_norm=clip_norm)

paddle_clip = paddle.nn.ClipGradByGlobalNorm(clip_norm=clip_norm)
paddle_cliped_data = paddle_clip(paddle_data)
paddle_res = []
for w, g in paddle_cliped_data:
paddle_res.append((w.numpy(), g.numpy()))

# compare grad value computed by numpy and paddle
for res, p_res in zip(np_res, paddle_res):
compare(res[1], p_res[1])


@pytest.mark.api_nn_ClipGradByGlobalNorm_vartype
def test_clip_grad_by_global_norm6():
"""
Test ClipGradByGlobalNorm when input data dtype changes.

Test base config:
input grad shape = [10, 10]
input grad number = 4
input data dtype = 'float32'
clip_norm = 1.0
value range: [-10, 10]

Changes:
input data dtype: float32 -> float64

Expected Results:
The output of ClipGradByGlobalNorm implemented by numpy and paddle should be equal.
"""
shape = [10, 10]
length = 4
clip_norm = 1.0
dtype = "float64"
np_data, paddle_data = generate_test_data(length, shape, dtype=dtype, value=10)
np_res = numpy_clip_grad_by_global_norm(np_data, clip_norm=clip_norm)

paddle_clip = paddle.nn.ClipGradByGlobalNorm(clip_norm=clip_norm)
paddle_cliped_data = paddle_clip(paddle_data)
paddle_res = []
for w, g in paddle_cliped_data:
paddle_res.append((w.numpy(), g.numpy()))

# compare grad value computed by numpy and paddle
for res, p_res in zip(np_res, paddle_res):
compare(res[1], p_res[1])


@pytest.mark.api_nn_ClipGradByGlobalNorm_vartype
def test_clip_grad_by_global_norm7():
"""
Test ClipGradByGlobalNorm when input data dtype changes.

Test base config:
input grad shape = [10, 10]
input grad number = 4
input data dtype = 'float32'
clip_norm = 1.0
value range: [-10, 10]

Changes:
input data dtype: float32 -> ['int8', 'int16', 'int32', 'float16']

Expected Results:
        paddle.nn.ClipGradByGlobalNorm cannot accept 'float16' input data; a RuntimeError is raised.
"""
shape = [10, 10]
length = 4
clip_norm = 1.0
unsupport_dtypes = ["int8", "int16", "int32", "float16"]
paddle_clip = paddle.nn.ClipGradByGlobalNorm(clip_norm=clip_norm)
for dtype in unsupport_dtypes:
np_data, paddle_data = generate_test_data(length, shape, dtype=dtype, value=10)
        try:
            paddle_clip(paddle_data)
        except RuntimeError:
            # every unsupported dtype is expected to raise RuntimeError
            pass
Collaborator:

Use the exception method from apibase.

Contributor Author:

Hello, when apibase's exception method runs, the _check_params function (line 425) converts the input data into a tensor via paddle.to_tensor. However, a paddle.nn.ClipGradByGlobalNorm instance is called with an argument of type list[tuple, tuple, tuple, ...], so that conversion fails at runtime. That is why a try/except block is used here to catch the exception. If exception should be used to catch it instead, how should the code be changed?
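For reference, a minimal sketch of how the unsupported-dtype check could assert the error with pytest.raises while still passing the list of (weight, grad) tensor tuples directly. It reuses the generate_test_data helper defined in the file above and assumes, as the test docstring states, that 'float16' input raises RuntimeError:

import pytest
import paddle


@pytest.mark.api_nn_ClipGradByGlobalNorm_vartype
def test_clip_grad_by_global_norm_float16_raises():
    """Hypothetical variant: assert the RuntimeError instead of swallowing it."""
    paddle_clip = paddle.nn.ClipGradByGlobalNorm(clip_norm=1.0)
    # same list[(weight, grad)] input shape as the tests above
    _, paddle_data = generate_test_data(4, [10, 10], dtype="float16", value=10)
    with pytest.raises(RuntimeError):
        paddle_clip(paddle_data)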