Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

【PaddlePaddle Hackathon 3】Add Paddle elementwise_floordiv operator #13058

Closed
wants to merge 13 commits into from
4 changes: 4 additions & 0 deletions src/core/tests/frontend/paddle/op_fuzzy.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -100,27 +100,31 @@ static const std::vector<std::string> models{
std::string("dropout_upscale_in_train"),
std::string("elementwise_add1"),
std::string("elementwise_div1"),
std::string("elementwise_floordiv1"),
std::string("elementwise_max1"),
std::string("elementwise_min1"),
std::string("elementwise_mul1"),
std::string("elementwise_pow1"),
std::string("elementwise_sub1"),
std::string("elementwise_add2"),
std::string("elementwise_div2"),
std::string("elementwise_floordiv2"),
std::string("elementwise_max2"),
std::string("elementwise_min2"),
std::string("elementwise_mul2"),
std::string("elementwise_pow2"),
std::string("elementwise_sub2"),
std::string("elementwise_add3"),
std::string("elementwise_div3"),
std::string("elementwise_floordiv3"),
std::string("elementwise_max3"),
std::string("elementwise_min3"),
std::string("elementwise_mul3"),
std::string("elementwise_pow3"),
std::string("elementwise_sub3"),
std::string("elementwise_add4"),
std::string("elementwise_div4"),
std::string("elementwise_floordiv4"),
std::string("elementwise_max4"),
std::string("elementwise_min4"),
std::string("elementwise_mul4"),
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
import sys
import numpy as np
import paddle

from save_model import saveModel


def elementwise_floordiv(name: str, x, y):
    """Build a static Paddle graph computing floor_divide(x, y), execute it on
    CPU with the given inputs, save the model via saveModel, and return the
    computed output array.

    name -- model name passed to saveModel
    x, y -- numpy arrays fed as the 'x' and 'y' graph inputs
    """
    paddle.enable_static()

    main_program = paddle.static.Program()
    startup_program = paddle.static.Program()
    with paddle.static.program_guard(main_program, startup_program):
        node_x = paddle.static.data(name='x', shape=x.shape, dtype=x.dtype)
        node_y = paddle.static.data(name='y', shape=y.shape, dtype=y.dtype)
        out = paddle.floor_divide(node_x, node_y)

        place = paddle.static.cpu_places(1)[0]
        exe = paddle.static.Executor(place)

        # The startup program runs the initializers for any parameters.
        exe.run(paddle.static.default_startup_program())
        outs = exe.run(feed={'x': x, 'y': y}, fetch_list=[out])
        saveModel(name, exe, feedkeys=['x', 'y'], fetchlist=[out], inputs=[x, y], outputs=[outs[0]],
                  target_dir=sys.argv[1])

    return outs[0]


def main():
    """Generate the four elementwise_floordiv reference models used by the
    Paddle frontend fuzzy tests: 1-D positive, 1-D negative, and two
    broadcasting cases where y's shape is a trailing subsequence of x's."""
    dtype = 'int64'

    # Case 1: same-shape 1-D positive inputs.
    lhs = np.array([2, 3, 4]).astype(dtype)
    rhs = np.array([1, 5, 2]).astype(dtype)
    elementwise_floordiv("elementwise_floordiv1", lhs, rhs)

    # Case 2: negative values exercise floor (round-toward-negative-infinity)
    # semantics rather than truncation.
    lhs = np.array([-2, -3, -4]).astype(dtype)
    rhs = np.array([-1, -5, -2]).astype(dtype)
    elementwise_floordiv("elementwise_floordiv2", lhs, rhs)

    # Case 3: rhs shape [4, 5] is a trailing subsequence of lhs shape
    # [2, 3, 4, 5], so it broadcasts against the last two axes.
    lhs = np.random.randint(1, 5, size=[2, 3, 4, 5]).astype(dtype)
    rhs = np.random.randint(1, 5, size=[4, 5]).astype(dtype)
    elementwise_floordiv("elementwise_floordiv3", lhs, rhs)

    # Case 4: same lhs, rhs broadcasts against the last axis only.
    rhs = np.random.randint(1, 5, size=[5]).astype(dtype)
    elementwise_floordiv("elementwise_floordiv4", lhs, rhs)


if __name__ == "__main__":
    main()
26 changes: 26 additions & 0 deletions src/frontends/paddle/src/op/elementwise_ops.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,32 @@ NamedOutputs elementwise_greater_equal(const NodeContext& node_context) {
return elementwise_ops<default_opset::GreaterEqual>(node_context);
}

NamedOutputs elementwise_floordiv(const NodeContext& node_context) {
    // Convert Paddle elementwise_floordiv: element-wise division with floor
    // (round-toward-negative-infinity) semantics for integer inputs.
    auto lhs = node_context.get_input("X");
    auto rhs = node_context.get_input("Y");
    const auto axis = node_context.get_attribute<int>("axis", -1);

    PADDLE_OP_CHECK(node_context, lhs.get_partial_shape().rank().is_static(), "elementwise_ops: X rank must be static!");
    PADDLE_OP_CHECK(node_context, rhs.get_partial_shape().rank().is_static(), "elementwise_ops: Y rank must be static!");
    const int64_t lhs_rank = lhs.get_partial_shape().rank().get_length();
    const int64_t rhs_rank = rhs.get_partial_shape().rank().get_length();

    // When axis is -1, points at the last X dimension, or the ranks already
    // match, Paddle's alignment coincides with numpy-style broadcasting and Y
    // can be used as-is. Otherwise Y must be expanded to X's rank by inserting
    // size-1 axes before position `axis` and after Y's trailing dimension.
    const bool numpy_broadcastable = (axis == -1) || (axis == lhs_rank - 1) || (lhs_rank == rhs_rank);
    if (!numpy_broadcastable) {
        std::vector<int64_t> unsqueeze_axes;
        for (int64_t dim = 0; dim < axis; ++dim)
            unsqueeze_axes.push_back(dim);
        for (int64_t dim = axis + rhs_rank; dim < lhs_rank; ++dim)
            unsqueeze_axes.push_back(dim);

        const auto axes_node =
            default_opset::Constant::create(ov::element::i64, ov::Shape{unsqueeze_axes.size()}, unsqueeze_axes);
        rhs = std::make_shared<default_opset::Unsqueeze>(rhs, axes_node);
    }
    // Third Divide argument (m_pythondiv = true) selects Python-style floor
    // division for integer operands, matching paddle.floor_divide.
    return node_context.default_single_output_mapping({std::make_shared<default_opset::Divide>(lhs, rhs, true)},
                                                      {"Out"});
}
}

} // namespace op
} // namespace paddle
} // namespace frontend
Expand Down
2 changes: 2 additions & 0 deletions src/frontends/paddle/src/op_table.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ OP_CONVERTER(dropout);
OP_CONVERTER(elementwise_add);
OP_CONVERTER(elementwise_div);
OP_CONVERTER(elementwise_equal);
OP_CONVERTER(elementwise_floordiv);
OP_CONVERTER(elementwise_greater_equal);
OP_CONVERTER(elementwise_max);
OP_CONVERTER(elementwise_min);
Expand Down Expand Up @@ -121,6 +122,7 @@ std::map<std::string, CreatorFunction> get_supported_ops() {
{"dropout", op::dropout},
{"elementwise_add", op::elementwise_add},
{"elementwise_div", op::elementwise_div},
{"elementwise_floordiv", op::elementwise_floordiv},
{"elementwise_max", op::elementwise_max},
{"elementwise_min", op::elementwise_min},
{"elementwise_mul", op::elementwise_mul},
Expand Down