diff --git a/python/paddle/nn/functional/loss.py b/python/paddle/nn/functional/loss.py index 2465e524c702e..4f061a43bf8d8 100755 --- a/python/paddle/nn/functional/loss.py +++ b/python/paddle/nn/functional/loss.py @@ -2227,7 +2227,7 @@ def hinge_embedding_loss(input, label, margin=1.0, reduction='mean', name=None): return loss -def cosine_embedding_loss(input1, input2, label, margin=0, reduction='mean'): +def cosine_embedding_loss(input1, input2, label, margin=0, reduction='mean', name=None): r""" This operator computes the cosine embedding loss of Tensor ``input1``, ``input2`` and ``label`` as follows. @@ -2259,6 +2259,8 @@ def cosine_embedding_loss(input1, input2, label, margin=0, reduction='mean'): ``'none'`` | ``'mean'`` | ``'sum'``. ``'none'``: no reduction will be applied, ``'mean'``: the sum of the output will be divided by the number of elements in the output ``'sum'``: the output will be summed. + name (str, optional): Name for the operation (optional, default is None). + For more information, please refer to :ref:`api_guide_Name`. Returns: Tensor, the cosine embedding Loss of Tensor ``input1`` ``input2`` and ``label``. @@ -2324,6 +2326,6 @@ def cosine_embedding_loss(input1, input2, label, margin=0, reduction='mean'): if reduction == 'none': return out if reduction == 'mean': - return paddle.mean(out) + return paddle.mean(out, name=name) elif reduction == 'sum': - return paddle.sum(out) + return paddle.sum(out, name=name) diff --git a/python/paddle/nn/layer/loss.py b/python/paddle/nn/layer/loss.py index 7781a7323e837..04866df39e409 100644 --- a/python/paddle/nn/layer/loss.py +++ b/python/paddle/nn/layer/loss.py @@ -1334,6 +1334,8 @@ class CosineEmbeddingLoss(Layer): ``'none'`` | ``'mean'`` | ``'sum'``. ``'none'``: no reduction will be applied, ``'mean'``: the sum of the output will be divided by the number of elements in the output, ``'sum'``: the output will be summed. + name (str, optional): Name for the operation (optional, default is None). 
+ For more information, please refer to :ref:`api_guide_Name`. Shape: input1 (Tensor): tensor with shape: [N, M] or [M], 'N' means batch size, 'M' means the length of input array. @@ -1370,7 +1372,7 @@ class CosineEmbeddingLoss(Layer): """ - def __init__(self, margin=0, reduction='mean'): + def __init__(self, margin=0, reduction='mean', name=None): if margin > 1 or margin < -1: raise ValueError( "The value of 'margin' should be in the interval of [-1, 1], but received %f, which is not allowed." @@ -1382,7 +1384,8 @@ def __init__(self, margin=0, reduction='mean'): super(CosineEmbeddingLoss, self).__init__() self.margin = margin self.reduction = reduction + self.name = name def forward(self, input1, input2, label): return F.cosine_embedding_loss( - input1, input2, label, margin=self.margin, reduction=self.reduction) + input1, input2, label, margin=self.margin, reduction=self.reduction, name=self.name)