Merged
@@ -195,6 +195,7 @@ OP_SAME_OPERANDS_AND_RESULT(Polygamma_)
 OP_SAME_OPERANDS_AND_RESULT(EnableCheckModelNanInf)
 OP_SAME_OPERANDS_AND_RESULT(ViewShape)
 OP_SAME_OPERANDS_AND_RESULT(Silu)
+OP_SAME_OPERANDS_AND_RESULT(Silu_)
 OP_SAME_OPERANDS_AND_RESULT(ViewDtype)
 OP_SAME_OPERANDS_AND_RESULT(FusedSoftmaxMaskUpperTriangle)
 OP_SAME_OPERANDS_AND_RESULT(Gammaln)
@@ -151,6 +151,7 @@ OP_DECLARE_INFER_SYMBOLIC_SHAPE(ShadowFeed)
 OP_DECLARE_INFER_SYMBOLIC_SHAPE(ShareData_)
 OP_DECLARE_INFER_SYMBOLIC_SHAPE(Sign)
 OP_DECLARE_INFER_SYMBOLIC_SHAPE(Silu)
+OP_DECLARE_INFER_SYMBOLIC_SHAPE(Silu_)
 OP_DECLARE_INFER_SYMBOLIC_SHAPE(Sin)
 OP_DECLARE_INFER_SYMBOLIC_SHAPE(Sin_)
 OP_DECLARE_INFER_SYMBOLIC_SHAPE(Sinh)
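
These two hunks register the new Silu_ op with PIR's shape machinery, mirroring the existing Silu entries: the first marks it as a same-operands-and-result op (the output keeps the input's shape and dtype), and the second declares its symbolic shape inference. A minimal sketch of what that looks like from Python, assuming a build that includes this PR and using the internal _C_ops module that the functional wrapper below dispatches to:

    import paddle
    from paddle import _C_ops  # internal dispatch module used by F.silu

    x = paddle.to_tensor([1.0, 2.0, 3.0, 4.0])
    out = _C_ops.silu_(x)        # generated inplace op; writes into x's buffer
    print(out.shape, out.dtype)  # same shape and dtype as the input, per the trait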
3 changes: 2 additions & 1 deletion paddle/phi/ops/yaml/ops.yaml
@@ -5062,12 +5062,13 @@
 
 - op : silu
   args : (Tensor x)
-  output : Tensor
+  output : Tensor(out)
   infer_meta :
     func : UnchangedInferMeta
     spmd_rule : ElementwiseUnaryInferSpmd
   kernel :
     func : silu
+  inplace : (x -> out)
   backward : silu_grad
   interfaces : paddle::dialect::LayoutTransformationInterface, paddle::dialect::InferSymbolicShapeInterface
   traits: pir::UnaryElementWiseTrait
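
The ops.yaml change is what drives code generation: naming the output Tensor(out) and adding inplace : (x -> out) tells the op generator to emit an additional silu_ entry that reuses x's buffer as out, while the plain silu op is unchanged. A hedged sketch of the observable difference (values match the docstring example below; running the in-place call under no_grad is a cautious assumption on my part, since the silu backward still consumes forward tensors):

    import paddle
    import paddle.nn.functional as F

    x = paddle.to_tensor([1.0, 2.0, 3.0, 4.0])
    ref = F.silu(x.clone())            # out-of-place: allocates a new output
    with paddle.no_grad():
        out = F.silu(x, inplace=True)  # dispatches to the generated silu_
    print(paddle.allclose(ref, out))   # same values either way
    print(x)                           # x itself now holds the silu results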
16 changes: 14 additions & 2 deletions python/paddle/nn/functional/activation.py
@@ -1080,7 +1080,7 @@ def selu(
 
 
 @param_one_alias(["x", "input"])
-def silu(x: Tensor, name: str | None = None) -> Tensor:
+def silu(x: Tensor, inplace: bool = False, name: str | None = None) -> Tensor:
     r"""
     silu activation
 
@@ -1097,6 +1097,7 @@ def silu(x: Tensor, name: str | None = None) -> Tensor:
     Parameters:
         x (Tensor): The input Tensor with data type bfloat16, float16, float32, float64, complex64, complex128.
             alias: ``input``.
+        inplace (bool, optional): Whether to perform the operation in-place. Default: False.
         name (str|None, optional): For details, please refer to :ref:`api_guide_Name`. Generally, no setting is required. Default: None.
 
     Returns:
@@ -1113,10 +1114,21 @@
            >>> print(out)
            Tensor(shape=[4], dtype=float32, place=Place(cpu), stop_gradient=True,
            [0.73105860, 1.76159406, 2.85772228, 3.92805505])
+
+            >>> out = F.silu(x, True)
+            >>> print(out)
+            Tensor(shape=[4], dtype=float32, place=Place(cpu), stop_gradient=True,
+            [0.73105860, 1.76159406, 2.85772228, 3.92805505])
+            >>> print(x)
+            Tensor(shape=[4], dtype=float32, place=Place(cpu), stop_gradient=True,
+            [0.73105860, 1.76159406, 2.85772228, 3.92805505])
     """
 
     if in_dynamic_or_pir_mode():
-        return _C_ops.silu(x)
+        if inplace:
+            return _C_ops.silu_(x)
+        else:
+            return _C_ops.silu(x)
     else:
         check_variable_and_dtype(
             x,
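
One caveat worth flagging in the docstring example: F.silu(x, True) passes inplace positionally, which works only because the new parameter was inserted before name. A pre-existing caller that passed name positionally, say F.silu(x, "my_name"), would now hand a truthy string to inplace and silently take the in-place path; that failure mode is hypothetical, but the keyword spelling avoids it entirely:

    import paddle
    import paddle.nn.functional as F

    x = paddle.to_tensor([1.0, 2.0, 3.0, 4.0])
    out = F.silu(x, inplace=True)  # keyword form; immune to parameter reordering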
19 changes: 16 additions & 3 deletions python/paddle/nn/layer/activation.py
@@ -1263,6 +1263,7 @@ class Silu(Layer):
     Where :math:`x` is the input Tensor.
 
     Parameters:
+        inplace (bool, optional): Whether to perform the operation in-place. Default: False.
         name (str|None, optional): For details, please refer to :ref:`api_guide_Name`. Generally, no setting is required. Default: None.
 
     Shape:
@@ -1280,17 +1281,29 @@
            >>> print(out)
            Tensor(shape=[4], dtype=float32, place=Place(cpu), stop_gradient=True,
            [0.73105860, 1.76159406, 2.85772228, 3.92805505])
+
+            >>> m = paddle.nn.Silu(True)
+            >>> out = m(x)
+            >>> print(out)
+            Tensor(shape=[4], dtype=float32, place=Place(cpu), stop_gradient=True,
+            [0.73105860, 1.76159406, 2.85772228, 3.92805505])
+            >>> print(x)
+            Tensor(shape=[4], dtype=float32, place=Place(cpu), stop_gradient=True,
+            [0.73105860, 1.76159406, 2.85772228, 3.92805505])
     """
 
-    def __init__(self, name: str | None = None) -> str:
+    def __init__(self, inplace: bool = False, name: str | None = None) -> None:
         super().__init__()
         self._name = name
+        self._inplace = inplace
 
     def forward(self, x: Tensor) -> Tensor:
-        return F.silu(x, self._name)
+        return F.silu(x, self._inplace, self._name)
 
     def extra_repr(self) -> str:
-        name_str = f'name={self._name}' if self._name else ''
+        name_str = f'inplace={self._inplace}' + (
+            f', name={self._name}' if self._name else ''
+        )
         return name_str
 
 
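For completeness, a short usage sketch of the updated layer, mirroring the functional example above; the extra_repr change means the flag also shows up when the module (or a model containing it) is printed:

    import paddle

    m = paddle.nn.Silu(inplace=True)
    x = paddle.to_tensor([1.0, 2.0, 3.0, 4.0])
    with paddle.no_grad():
        y = m(x)   # overwrites x; same values as the out-of-place layer
    print(m)       # Silu(inplace=True), via the new extra_repr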