Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

【Paddle Hackathon No.11】 #45595

Merged
merged 14 commits into from
Oct 13, 2022
Merged
348 changes: 348 additions & 0 deletions python/paddle/fluid/tests/unittests/test_multimarginloss.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,348 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import paddle
import numpy as np
import unittest


def call_MultiMarginLoss_layer(
    input,
    label,
    p=1,
    margin=1.0,
    weight=None,
    reduction='mean',
):
    """Compute multi-margin loss through the layer (class) API.

    Args:
        input: score tensor of shape (N, C).
        label: integer class-index tensor of shape (N,).
        p (int): exponent applied to the hinge term.
        margin (float): margin added to non-target scores.
        weight: optional weight tensor forwarded to the layer.
        reduction (str): 'mean', 'sum' or 'none'.

    Returns:
        The loss tensor produced by ``paddle.nn.MultiMarginLoss``.
    """
    # Renamed from `triplet_margin_loss` (copy-paste leftover from the
    # triplet-margin test) to reflect the loss actually constructed here.
    multi_margin_loss = paddle.nn.MultiMarginLoss(p=p,
                                                  margin=margin,
                                                  weight=weight,
                                                  reduction=reduction)
    res = multi_margin_loss(
        input=input,
        label=label,
    )
    return res


def call_MultiMarginLoss_functional(
    input,
    label,
    p=1,
    margin=1.0,
    weight=None,
    reduction='mean',
):
    """Compute multi-margin loss through the functional API.

    Thin wrapper that forwards every argument straight to
    ``paddle.nn.functional.multi_margin_loss`` and returns its result.
    """
    return paddle.nn.functional.multi_margin_loss(input=input,
                                                  label=label,
                                                  p=p,
                                                  margin=margin,
                                                  weight=weight,
                                                  reduction=reduction)


def test_static(place,
                input_np,
                label_np,
                p=1,
                margin=1.0,
                weight_np=None,
                reduction='mean',
                functional=False):
    """Evaluate multi-margin loss in static-graph mode and return the result.

    Builds a fresh program, declares ``paddle.static.data`` layers shaped
    like the incoming numpy arrays, runs either the functional or the layer
    API (selected by ``functional``), and fetches the loss with an Executor.

    Args:
        place: device to run on (CPUPlace / CUDAPlace).
        input_np (ndarray): scores of shape (N, C).
        label_np (ndarray): integer labels of shape (N,).
        p, margin, weight_np, reduction: forwarded loss hyper-parameters.
        functional (bool): use the functional API instead of the layer API.

    Returns:
        list: the fetched loss value(s) from ``Executor.run``.
    """
    # NOTE: the original PR text had review comments pasted into the middle
    # of the first paddle.static.data call; this is the reconstructed code.
    prog = paddle.static.Program()
    startup_prog = paddle.static.Program()
    with paddle.static.program_guard(prog, startup_prog):
        input = paddle.static.data(name='input',
                                   shape=input_np.shape,
                                   dtype=input_np.dtype)
        label = paddle.static.data(name='label',
                                   shape=label_np.shape,
                                   dtype=label_np.dtype)
        feed_dict = {
            "input": input_np,
            "label": label_np,
        }
        weight = None
        if weight_np is not None:
            weight = paddle.static.data(name='weight',
                                        shape=weight_np.shape,
                                        dtype=weight_np.dtype)
            feed_dict['weight'] = weight_np
        if functional:
            res = call_MultiMarginLoss_functional(input=input,
                                                  label=label,
                                                  p=p,
                                                  margin=margin,
                                                  weight=weight,
                                                  reduction=reduction)
        else:
            res = call_MultiMarginLoss_layer(input=input,
                                             label=label,
                                             p=p,
                                             margin=margin,
                                             weight=weight,
                                             reduction=reduction)

        exe = paddle.static.Executor(place)
        static_result = exe.run(prog, feed=feed_dict, fetch_list=[res])
    return static_result


def test_dygraph(place,
                 input,
                 label,
                 p=1,
                 margin=1.0,
                 weight=None,
                 reduction='mean',
                 functional=False):
    """Evaluate multi-margin loss eagerly and return the result as numpy.

    Converts the numpy inputs to tensors, dispatches to the functional or
    the layer API, and restores static mode before returning.
    """
    paddle.disable_static()
    input_t = paddle.to_tensor(input)
    label_t = paddle.to_tensor(label)
    weight_t = None if weight is None else paddle.to_tensor(weight)

    # Select the API under test once, then call it with identical kwargs.
    loss_fn = (call_MultiMarginLoss_functional
               if functional else call_MultiMarginLoss_layer)
    dy_res = loss_fn(input=input_t,
                     label=label_t,
                     p=p,
                     margin=margin,
                     weight=weight_t,
                     reduction=reduction)
    dy_result = dy_res.numpy()
    paddle.enable_static()
    return dy_result


def calc_multi_margin_loss(
    input,
    label,
    p=1,
    margin=1.0,
    weight=None,
    reduction='mean',
):
    """Numpy reference implementation of multi-margin loss.

    For each sample ``i`` with target class ``y_i`` it averages
    ``max(0, margin + x[i, j] - x[i, y_i]) ** p`` over all classes ``j`` and
    subtracts the target's own contribution ``margin ** p / C``.

    Args:
        input (ndarray): scores of shape (N, C).
        label (ndarray): integer class indices of shape (N,).
        p (int): exponent applied to the hinge term.
        margin (float): margin added to non-target scores.
        weight (ndarray, optional): scaling applied inside the hinge before
            clamping.  NOTE(review): row ``i`` is scaled by ``weight[i]``
            (per sample); paddle documents ``weight`` as per class — the two
            coincide in these tests only because N == C.  Confirm against
            the op before reusing this reference elsewhere.
        reduction (str): 'mean', 'sum' or 'none'.

    Returns:
        ndarray or scalar: the per-sample losses, or their mean/sum.
    """
    label = label.reshape(-1, 1)
    # Target-class score for every sample, shape (N, 1).
    index_sample = []
    for i in range(len(label)):
        index_sample.append(input[i, label[i]])
    index_sample = np.array(index_sample).reshape(-1, 1)

    if weight is None:
        expected = np.mean(np.maximum(margin + input - index_sample, 0.0)**p,
                           axis=1) - margin**p / input.shape[1]
    else:
        weight = weight.reshape(-1, 1)
        expected = np.mean(
            np.maximum(weight * (margin + input - index_sample), 0.0)**p,
            axis=1) - margin**p / input.shape[1]

    if reduction == 'mean':
        expected = np.mean(expected)
    elif reduction == 'sum':
        expected = np.sum(expected)
    # reduction == 'none': return the per-sample vector unchanged.

    return expected


class TestMultiMarginLoss(unittest.TestCase):
    """Cross-checks paddle's MultiMarginLoss (layer + functional, dygraph +
    static) against the numpy reference ``calc_multi_margin_loss``."""

    def test_MultiMarginLoss(self):
        """Default p/margin over every available place and reduction."""
        shape = (2, 2)
        input = np.random.uniform(0.1, 0.8, size=shape).astype(np.float64)
        label = np.random.uniform(0, input.shape[1],
                                  size=(2, )).astype(np.int32)

        places = [paddle.CPUPlace()]
        if paddle.device.is_compiled_with_cuda():
            places.append(paddle.CUDAPlace(0))
        reductions = ['sum', 'mean', 'none']
        for place in places:
            for reduction in reductions:
                expected = calc_multi_margin_loss(input=input,
                                                  label=label,
                                                  reduction=reduction)

                dy_result = test_dygraph(
                    place=place,
                    input=input,
                    label=label,
                    reduction=reduction,
                )

                static_result = test_static(
                    place=place,
                    input_np=input,
                    label_np=label,
                    reduction=reduction,
                )
                self.assertTrue(np.allclose(static_result, expected))
                self.assertTrue(np.allclose(static_result, dy_result))
                self.assertTrue(np.allclose(dy_result, expected))
                static_functional = test_static(place=place,
                                                input_np=input,
                                                label_np=label,
                                                reduction=reduction,
                                                functional=True)
                dy_functional = test_dygraph(place=place,
                                             input=input,
                                             label=label,
                                             reduction=reduction,
                                             functional=True)
                self.assertTrue(np.allclose(static_functional, expected))
                self.assertTrue(np.allclose(static_functional, dy_functional))
                self.assertTrue(np.allclose(dy_functional, expected))

    def test_MultiMarginLoss_error(self):
        """An unsupported reduction string must raise ValueError in both APIs."""
        paddle.disable_static()
        self.assertRaises(ValueError,
                          paddle.nn.MultiMarginLoss,
                          reduction="unsupport reduction")
        input = paddle.to_tensor([[0.1, 0.3]], dtype='float32')
        label = paddle.to_tensor([0], dtype='int32')
        self.assertRaises(ValueError,
                          paddle.nn.functional.multi_margin_loss,
                          input=input,
                          label=label,
                          reduction="unsupport reduction")
        paddle.enable_static()

    def test_MultiMarginLoss_dimension(self):
        """Mismatched batch sizes between input and label must raise."""
        paddle.disable_static()

        input = paddle.to_tensor([[0.1, 0.3], [1, 2]], dtype='float32')
        label = paddle.to_tensor([0, 1, 1], dtype='int32')

        self.assertRaises(
            ValueError,
            paddle.nn.functional.multi_margin_loss,
            input=input,
            label=label,
        )
        MMLoss = paddle.nn.MultiMarginLoss()
        self.assertRaises(
            ValueError,
            MMLoss,
            input=input,
            label=label,
        )
        paddle.enable_static()

    def test_MultiMarginLoss_p(self):
        """p=2 agrees across the reference, dygraph and static, both APIs."""
        p = 2
        shape = (2, 2)
        reduction = 'mean'
        place = paddle.CPUPlace()
        input = np.random.uniform(0.1, 0.8, size=shape).astype(np.float64)
        label = np.random.uniform(0, input.shape[1],
                                  size=(2, )).astype(np.int64)
        expected = calc_multi_margin_loss(input=input,
                                          p=p,
                                          label=label,
                                          reduction=reduction)

        dy_result = test_dygraph(
            place=place,
            p=p,
            input=input,
            label=label,
            reduction=reduction,
        )

        static_result = test_static(
            place=place,
            p=p,
            input_np=input,
            label_np=label,
            reduction=reduction,
        )
        self.assertTrue(np.allclose(static_result, expected))
        self.assertTrue(np.allclose(static_result, dy_result))
        self.assertTrue(np.allclose(dy_result, expected))
        static_functional = test_static(place=place,
                                        p=p,
                                        input_np=input,
                                        label_np=label,
                                        reduction=reduction,
                                        functional=True)
        dy_functional = test_dygraph(place=place,
                                     p=p,
                                     input=input,
                                     label=label,
                                     reduction=reduction,
                                     functional=True)
        self.assertTrue(np.allclose(static_functional, expected))
        self.assertTrue(np.allclose(static_functional, dy_functional))
        self.assertTrue(np.allclose(dy_functional, expected))

    def test_MultiMarginLoss_weight(self):
        """Weighted loss agrees across the reference, dygraph and static."""
        # NOTE: review-comment noise pasted between these two lines in the PR
        # text has been removed; the code itself is unchanged.
        shape = (2, 2)
        reduction = 'mean'
        place = paddle.CPUPlace()
        input = np.random.uniform(0.1, 0.8, size=shape).astype(np.float64)
        label = np.random.uniform(0, input.shape[1],
                                  size=(2, )).astype(np.int64)
        weight = np.random.uniform(0, 2, size=(2, )).astype(np.float64)
        expected = calc_multi_margin_loss(input=input,
                                          label=label,
                                          weight=weight,
                                          reduction=reduction)

        dy_result = test_dygraph(
            place=place,
            input=input,
            label=label,
            weight=weight,
            reduction=reduction,
        )

        static_result = test_static(
            place=place,
            input_np=input,
            label_np=label,
            weight_np=weight,
            reduction=reduction,
        )
        self.assertTrue(np.allclose(static_result, expected))
        self.assertTrue(np.allclose(static_result, dy_result))
        self.assertTrue(np.allclose(dy_result, expected))
        static_functional = test_static(place=place,
                                        input_np=input,
                                        label_np=label,
                                        weight_np=weight,
                                        reduction=reduction,
                                        functional=True)
        dy_functional = test_dygraph(place=place,
                                     input=input,
                                     label=label,
                                     weight=weight,
                                     reduction=reduction,
                                     functional=True)
        self.assertTrue(np.allclose(static_functional, expected))
        self.assertTrue(np.allclose(static_functional, dy_functional))
        self.assertTrue(np.allclose(dy_functional, expected))


if __name__ == "__main__":
    # Indentation of the guard body was lost in the pasted text; restored so
    # the file can be run directly: `python test_multimarginloss.py`.
    unittest.main()
2 changes: 2 additions & 0 deletions python/paddle/nn/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -109,6 +109,7 @@
from .layer.loss import SmoothL1Loss # noqa: F401
from .layer.loss import HingeEmbeddingLoss # noqa: F401
from .layer.loss import CosineEmbeddingLoss # noqa: F401
from .layer.loss import MultiMarginLoss
from .layer.loss import TripletMarginWithDistanceLoss
from .layer.loss import TripletMarginLoss
from .layer.loss import SoftMarginLoss
Expand Down Expand Up @@ -319,6 +320,7 @@ def weight_norm(*args):
'Identity',
'CosineEmbeddingLoss',
'RReLU',
'MultiMarginLoss',
'TripletMarginWithDistanceLoss',
'TripletMarginLoss',
'SoftMarginLoss',
Expand Down
2 changes: 2 additions & 0 deletions python/paddle/nn/functional/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -92,6 +92,7 @@
from .loss import ctc_loss # noqa: F401
from .loss import hinge_embedding_loss # noqa: F401
from .loss import cosine_embedding_loss # noqa: F401
from .loss import multi_margin_loss
from .loss import multi_label_soft_margin_loss
from .loss import triplet_margin_with_distance_loss
from .loss import triplet_margin_loss
Expand Down Expand Up @@ -241,5 +242,6 @@
'rrelu',
'triplet_margin_with_distance_loss',
'triplet_margin_loss',
'multi_margin_loss',
'soft_margin_loss',
]
Loading