Skip to content

Commit

Permalink
test
Browse the repository at this point in the history
  • Loading branch information
a162837 committed Nov 10, 2024
1 parent 59af9fd commit 4411448
Show file tree
Hide file tree
Showing 2 changed files with 59 additions and 13 deletions.
67 changes: 54 additions & 13 deletions python/paddle/tensor/math.py
Original file line number Diff line number Diff line change
Expand Up @@ -3705,6 +3705,7 @@ def log10_(x: Tensor, name: str | None = None) -> Tensor:
if in_dynamic_mode():
return _C_ops.log10_(x)


def check_clip_tensor(c_x, value, re_value, value_type, name):
if value is None:
value = paddle.full_like(c_x, re_value, value_type)
Expand All @@ -3727,6 +3728,7 @@ def check_clip_tensor(c_x, value, re_value, value_type, name):
value = paddle.full_like(c_x, value, value_type)
return value


def clip(
x: Tensor,
min: float | Tensor | None = None,
Expand Down Expand Up @@ -3788,18 +3790,36 @@ def clip(
max_ = float(np.finfo(np.float32).max)
tensor_dtype = 'float32'

if (isinstance(min, Variable) and (len(min.shape) > 1 or (len(min.shape == 1) and min.shape[-1] != 1))) or (isinstance(max, Variable) and (len(max.shape) > 1 or (len(max.shape == 1) and max.shape[-1] != 1))):
if (
isinstance(min, Variable)
and (len(min.shape) > 1 or (len(min.shape == 1) and min.shape[-1] != 1))
) or (
isinstance(max, Variable)
and (len(max.shape) > 1 or (len(max.shape == 1) and max.shape[-1] != 1))
):
min = paddle.full_like(x, min_, tensor_dtype) if min is None else min
max = paddle.full_like(x, max_, tensor_dtype) if max is None else max
min = paddle.full_like(x, min, tensor_dtype) if not isinstance(min, Variable) else min
max = paddle.full_like(x, max, tensor_dtype) if not isinstance(max, Variable) else max
min = (
paddle.full_like(x, min, tensor_dtype)
if not isinstance(min, Variable)
else min
)
max = (
paddle.full_like(x, max, tensor_dtype)
if not isinstance(max, Variable)
else max
)

if (len(min.shape) == 1 and min.shape[-1] == 0) or min.shape != x.shape[-len(min.shape) :]:
if (len(min.shape) == 1 and min.shape[-1] == 0) or min.shape != x.shape[
-len(min.shape) :
]:
raise ValueError(
f"The min dimension should be equal to the inner dimension of the x, but the min dimension is {min.shape}"
)

if (len(max.shape) == 1 and max.shape[-1] == 0) or max.shape != x.shape[-len(max.shape) :]:

if (len(max.shape) == 1 and max.shape[-1] == 0) or max.shape != x.shape[
-len(max.shape) :
]:
raise ValueError(
f"The max dimension should be equal to the inner dimension of the x, but the max dimension is {max.shape}"
)
Expand Down Expand Up @@ -3861,7 +3881,10 @@ def clip(
dtype=helper.input_dtype('x')
)
helper.append_op(
type='clip', inputs=inputs, outputs={'Out': [output]}, attrs=attrs
type='clip',
inputs=inputs,
outputs={'Out': [output]},
attrs=attrs,
)

return output
Expand All @@ -3882,18 +3905,36 @@ def clip_(
fmax = float(np.finfo(np.float32).max)
tensor_dtype = 'float32'

if (isinstance(min, Variable) and (len(min.shape) > 1 or (len(min.shape == 1) and min.shape[-1] != 1))) or (isinstance(max, Variable) and (len(max.shape) > 1 or (len(max.shape == 1) and max.shape[-1] != 1))):
if (
isinstance(min, Variable)
and (len(min.shape) > 1 or (len(min.shape == 1) and min.shape[-1] != 1))
) or (
isinstance(max, Variable)
and (len(max.shape) > 1 or (len(max.shape == 1) and max.shape[-1] != 1))
):
min = paddle.full_like(x, fmin, tensor_dtype) if min is None else min
max = paddle.full_like(x, fmax, tensor_dtype) if max is None else max
min = paddle.full_like(x, min, tensor_dtype) if not isinstance(min, Variable) else min
max = paddle.full_like(x, max, tensor_dtype) if not isinstance(max, Variable) else max
min = (
paddle.full_like(x, min, tensor_dtype)
if not isinstance(min, Variable)
else min
)
max = (
paddle.full_like(x, max, tensor_dtype)
if not isinstance(max, Variable)
else max
)

if (len(min.shape) == 1 and min.shape[-1] == 0) or min.shape != x.shape[-len(min.shape) :]:
if (len(min.shape) == 1 and min.shape[-1] == 0) or min.shape != x.shape[
-len(min.shape) :
]:
raise ValueError(
f"The min dimension should be equal to the inner dimension of the x, but the min dimension is {min.shape}"
)

if (len(max.shape) == 1 and max.shape[-1] == 0) or max.shape != x.shape[-len(max.shape) :]:

if (len(max.shape) == 1 and max.shape[-1] == 0) or max.shape != x.shape[
-len(max.shape) :
]:
raise ValueError(
f"The max dimension should be equal to the inner dimension of the x, but the max dimension is {max.shape}"
)
Expand Down
5 changes: 5 additions & 0 deletions test/legacy_test/test_clip_tensor.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@

import paddle


class TestClipTenosr(unittest.TestCase):

def test_shape_error(self):
Expand All @@ -25,12 +26,14 @@ def test_min_error():
x = paddle.randn([3, 5, 8, 10], dtype='float16')
min = paddle.randn([8, 3], dtype='float16')
paddle.clip(x, min)

self.assertRaises(ValueError, test_min_error)

def test_max_error():
x = paddle.randn([3, 5, 8, 10], dtype='float32')
max = paddle.randn([8, 3], dtype='float32')
paddle.clip(x, -5.0, max)

self.assertRaises(ValueError, test_max_error)


Expand All @@ -42,12 +45,14 @@ def test_min_error():
x = paddle.randn([3, 5, 8, 10], dtype='float16')
min = paddle.randn([8, 3], dtype='float16')
paddle.clip_(x, min)

self.assertRaises(ValueError, test_min_error)

def test_max_error():
x = paddle.randn([3, 5, 8, 10], dtype='float32')
max = paddle.randn([8, 3], dtype='float32')
paddle.clip_(x, -5.0, max)

self.assertRaises(ValueError, test_max_error)


Expand Down

0 comments on commit 4411448

Please sign in to comment.