Commit bc6402f

【PaddlePaddle Hackathon 4】add paddle softshrink op (#15845)
* add softshrink

* Modify the implementation

* fix input unsigned

* fix name typo

* fix unsigned

* fix select

* add input dtype check

---------

Co-authored-by: Bo Liu <bo4.liu@intel.com>
AndPuQing and liubo-intel authored May 4, 2023
1 parent c1933fc commit bc6402f
Showing 4 changed files with 106 additions and 0 deletions.
46 changes: 46 additions & 0 deletions src/frontends/paddle/src/op/softshrink.cpp
@@ -0,0 +1,46 @@
// Copyright (C) 2018-2023 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#include "default_opset.hpp"
#include "openvino/frontend/paddle/node_context.hpp"

namespace ov {
namespace frontend {
namespace paddle {
namespace op {
NamedOutputs softshrink(const NodeContext& node) {
    auto data = node.get_input("X");
    const float lambda = node.get_attribute<float>("lambda", 0.5f);
    const auto input_element_type = data.get_element_type();
    PADDLE_OP_CHECK(node, lambda >= 0, "Softshrink op lambda must be non-negative.");
    PADDLE_OP_CHECK(node, input_element_type.is_signed(), "Softshrink op input must be signed type.");

    std::shared_ptr<ngraph::Node> output;
    const auto positive_lambda = default_opset::Constant::create(input_element_type, Shape{}, {lambda});
    const auto negative_lambda = default_opset::Constant::create(input_element_type, Shape{}, {-lambda});
    std::shared_ptr<ngraph::Node> negative_node = std::make_shared<default_opset::Subtract>(data, positive_lambda);
    std::shared_ptr<ngraph::Node> positive_node = std::make_shared<default_opset::Add>(data, positive_lambda);

    std::shared_ptr<ngraph::Node> zero_node = default_opset::Constant::create(input_element_type, Shape{}, {0});

    // Create masks for values below negative lambda and above positive lambda
    std::shared_ptr<ngraph::Node> values_below_neg_lambda =
        std::make_shared<default_opset::Less>(data, negative_lambda);
    std::shared_ptr<ngraph::Node> values_above_pos_lambda =
        std::make_shared<default_opset::Greater>(data, positive_lambda);

    output = std::make_shared<default_opset::Select>(values_above_pos_lambda, negative_node, data);
    output = std::make_shared<default_opset::Select>(values_below_neg_lambda, positive_node, output);

    std::shared_ptr<ngraph::Node> zero_mask =
        std::make_shared<default_opset::LogicalOr>(values_below_neg_lambda, values_above_pos_lambda);

    output = std::make_shared<default_opset::Select>(zero_mask, output, zero_node);

    return node.default_single_output_mapping({output}, {"Out"});
}
} // namespace op
} // namespace paddle
} // namespace frontend
} // namespace ov
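For reference, the Select chain in the converter above implements the standard softshrink piecewise function:

    softshrink(x) = x - lambda   if x > lambda
                    x + lambda   if x < -lambda
                    0            otherwise

The first two Select nodes substitute the shifted values in the two active regions, and the final Select zeroes every element where neither mask is set, i.e. inputs inside [-lambda, lambda].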
2 changes: 2 additions & 0 deletions src/frontends/paddle/src/op_table.cpp
@@ -93,6 +93,7 @@ OP_CONVERTER(shape);
OP_CONVERTER(slice);
OP_CONVERTER(softmax);
OP_CONVERTER(softplus);
OP_CONVERTER(softshrink);
OP_CONVERTER(sigmoid);
OP_CONVERTER(split);
OP_CONVERTER(sqrt);
@@ -209,6 +210,7 @@ std::map<std::string, CreatorFunction> get_supported_ops() {
{"slice", op::slice},
{"softmax", op::softmax},
{"softplus", op::softplus},
{"softshrink", op::softshrink},
{"sigmoid", op::sigmoid},
{"split", op::split},
{"sqrt", op::sqrt},
2 changes: 2 additions & 0 deletions src/frontends/paddle/tests/op_fuzzy.cpp
@@ -454,6 +454,8 @@ static const std::vector<std::string> models{
std::string("softmax"),
std::string("softmax_minus"),
std::string("softplus_default_params"),
std::string("softshrink_default_params"),
std::string("softshrink_threshold_0.6"),
std::string("split_test1"),
std::string("split_test2"),
std::string("split_test3"),
56 changes: 56 additions & 0 deletions
@@ -0,0 +1,56 @@
# Copyright (C) 2018-2023 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

#
# softshrink paddle model generator
#
import numpy as np
import sys
from save_model import saveModel


def softshrink(name: str, x, threshold):
    import paddle

    paddle.enable_static()

    node_x = paddle.static.data(name="x", shape=x.shape, dtype="float32")
    if threshold is None:
        out = paddle.nn.functional.softshrink(node_x)
    else:
        out = paddle.nn.functional.softshrink(node_x, threshold)

    cpu = paddle.static.cpu_places(1)
    exe = paddle.static.Executor(cpu[0])
    # startup program will call initializer to initialize the parameters.
    exe.run(paddle.static.default_startup_program())

    outs = exe.run(feed={"x": x}, fetch_list=[out])

    saveModel(
        name,
        exe,
        feedkeys=["x"],
        fetchlist=[out],
        inputs=[x],
        outputs=[outs[0]],
        target_dir=sys.argv[1],
    )

    return outs[0]


def main():
    data = np.array(
        [
            [[2.0, 3.0, 4.0, 5.0], [0.0, 4.0, -5.0, 6.0], [7.0, -8.0, 8.0, 9.0]],
            [[-1.0, 2.0, 3.0, 4.0], [-5.0, 6.0, 7.0, 8.0], [6.0, 7.0, 8.0, 9.0]],
        ]
    ).astype(np.float32)

    softshrink("softshrink_default_params", data, threshold=None)
    softshrink("softshrink_threshold_0.6", data, threshold=0.6)


if __name__ == "__main__":
    main()
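As a sanity check outside the commit, the reference outputs of both generated models can be reproduced with plain NumPy; softshrink_ref is a hypothetical helper, and 0.5 is Paddle's documented default threshold:

import numpy as np

# Hypothetical closed-form reference (not part of the commit).
def softshrink_ref(x, lam):
    return np.where(x > lam, x - lam, np.where(x < -lam, x + lam, 0.0)).astype(x.dtype)

data = np.array(
    [
        [[2.0, 3.0, 4.0, 5.0], [0.0, 4.0, -5.0, 6.0], [7.0, -8.0, 8.0, 9.0]],
        [[-1.0, 2.0, 3.0, 4.0], [-5.0, 6.0, 7.0, 8.0], [6.0, 7.0, 8.0, 9.0]],
    ],
    dtype=np.float32,
)

print(softshrink_ref(data, 0.5))  # expected outputs for softshrink_default_params
print(softshrink_ref(data, 0.6))  # expected outputs for softshrink_threshold_0.6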
