[Frontend][PaddlePaddle] Add activation, elementwise, and reduce operators #9370

Merged
merged 4 commits on Oct 28, 2021
60 changes: 60 additions & 0 deletions python/tvm/relay/frontend/paddlepaddle.py
@@ -763,6 +763,33 @@ def convert_pool2d(g, op, block):
    g.add_node(op.output("Out")[0], out)


def convert_reduce(g, op, block):
    """Operator converter for the family of reduce operators."""

    op_map = {
        "reduce_all": "all",
        "reduce_any": "any",
        "reduce_max": "max",
        "reduce_min": "min",
        "reduce_prod": "prod",
        "reduce_sum": "sum",
        "reduce_mean": "mean",
    }
    op_name = op_map[op.type]
    input_x = g.get_node(op.input("X")[0])
    axis = op.attr("dim")
    if op.attr("reduce_all"):
        axis = None
    keepdims = op.attr("keep_dim")
    out = get_relay_op(op_name)(input_x, axis=axis, keepdims=keepdims)
    if not axis and not keepdims:
        # When reducing over all axes without keepdims, Relay yields a 0-d
        # tensor (shape ()), while Paddle returns a tensor of shape [1], so
        # add the dimension back with `expand_dims` (a standalone sketch of
        # this mismatch follows the function).
        out = _op.expand_dims(out, axis=0)
    g.add_node(op.output("Out")[0], out)
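
A minimal standalone sketch of the shape mismatch handled above, written directly against the Relay op API; the variable names and the `InferType` check are illustrative and not part of the PR.

import tvm
from tvm import relay

# Reduce a (2, 3) tensor over all axes without keepdims.
x = relay.var("x", shape=(2, 3), dtype="float32")
summed = relay.sum(x, axis=None, keepdims=False)   # Relay result: 0-d tensor, shape ()
fixed = relay.expand_dims(summed, axis=0)          # after the fix: shape (1,), matching Paddle's [1]

mod = tvm.IRModule.from_expr(relay.Function([x], fixed))
mod = relay.transform.InferType()(mod)
print(mod)  # the inferred return type should read Tensor[(1), float32]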


def convert_reshape(g, op, block):
"""Operator converter for reshape."""

@@ -900,15 +927,22 @@ def convert_unsqueeze(g, op, block):


_convert_map = {
    "abs": convert_unary_op,
    "acos": convert_unary_op,
    "arg_max": convert_arg_max_min,
    "arg_min": convert_arg_max_min,
    "argsort": convert_argsort,
    "asin": convert_unary_op,
    "assign": convert_assign,
    "assign_value": convert_assign_value,
    "atan": convert_unary_op,
    "batch_norm": convert_batch_norm,
    "cast": convert_cast,
    "ceil": convert_unary_op,
    "concat": convert_concat,
    "conv2d": convert_conv2d,
    "cos": convert_unary_op,
    "cosh": convert_unary_op,
    "cumsum": convert_cumsum,
    "depthwise_conv2d": convert_conv2d,
    "dot": convert_dot,
@@ -918,12 +952,14 @@ def convert_unsqueeze(g, op, block):
    "elementwise_mul": convert_elementwise_op,
    "elementwise_sub": convert_elementwise_op,
    "equal": convert_elementwise_op,
    "erf": convert_unary_op,
    "exp": convert_unary_op,
    "expand_v2": convert_expand,
    "expand_as_v2": convert_expand_as,
    "feed": convert_feed,
    "fill_any_like": convert_fill_any_like,
    "fill_constant": convert_fill_constant,
    "floor": convert_unary_op,
    "gelu": convert_gelu,
    "hard_sigmoid": convert_hard_sigmoid,
    "hard_swish": convert_hard_swish,
@@ -932,6 +968,11 @@ def convert_unsqueeze(g, op, block):
    "isnan_v2": convert_unary_op,
    "layer_norm": convert_layer_norm,
    "leaky_relu": convert_leaky_relu,
    "less_equal": convert_elementwise_op,
    "less_than": convert_elementwise_op,
    "log": convert_unary_op,
    "log2": convert_unary_op,
    "log10": convert_unary_op,
    "logical_and": convert_binary_logical_op,
    "logical_or": convert_binary_logical_op,
    "logical_xor": convert_binary_logical_op,
@@ -943,11 +984,26 @@ def convert_unsqueeze(g, op, block):
    "pool2d": convert_pool2d,
    "relu": convert_unary_op,
    "reshape2": convert_reshape,
    "round": convert_unary_op,
    "reduce_all": convert_reduce,
    "reduce_any": convert_reduce,
    "reduce_max": convert_reduce,
    "reduce_min": convert_reduce,
    "reduce_prod": convert_reduce,
    "reduce_sum": convert_reduce,
    "reduce_mean": convert_reduce,
    "rsqrt": convert_unary_op,
    "scale": convert_scale,
    "shape": convert_shape,
    "sigmoid": convert_unary_op,
    "sign": convert_unary_op,
    "sin": convert_unary_op,
    "sinh": convert_unary_op,
    "slice": convert_slice,
    "softmax": convert_softmax,
    "sqrt": convert_unary_op,
    "squeeze2": convert_squeeze,
    "tan": convert_unary_op,
    "tanh": convert_unary_op,
    "unsqueeze2": convert_unsqueeze,
}
@@ -1123,6 +1179,10 @@ def from_paddle(program_or_layer, shape_dict=None, scope=None):

    import paddle

    # Disable signal capturing in the Paddle framework; Paddle's signal
    # handler can conflict with AutoTVM when it runs alongside the Paddle
    # frontend.
    paddle.disable_signal_handler()

    g = GraphProto()
    if isinstance(program_or_layer, paddle.jit.TranslatedLayer):
        # model is loaded by `paddle.jit.load`
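
For context, a hedged end-to-end sketch of how a model exercising one of the newly mapped ops reaches `from_paddle`. The layer, save path, and input spec below are made-up examples, and the flow assumes the `paddle.jit.load` path checked in the function above and the usual `(mod, params)` return value.

import paddle
from tvm import relay


class MeanLayer(paddle.nn.Layer):
    # toy module that hits the new "reduce_mean" -> convert_reduce mapping
    @paddle.jit.to_static
    def forward(self, x):
        return paddle.mean(x, axis=-1, keepdim=True)


layer = MeanLayer()
layer.eval()
spec = [paddle.static.InputSpec(shape=[2, 3, 4], dtype="float32", name="x")]
paddle.jit.save(layer, "./mean_layer", input_spec=spec)  # hypothetical path

loaded = paddle.jit.load("./mean_layer")  # a paddle.jit.TranslatedLayer
mod, params = relay.frontend.from_paddle(loaded)
print(mod)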
49 changes: 49 additions & 0 deletions tests/python/frontend/paddlepaddle/test_forward.py
@@ -461,6 +461,8 @@ def forward(self, input1, input2):

    api_list = [
        "equal",
        "less_equal",
        "less_than",
    ]
    x_shapes = [[128], [8, 20], [4, 20, 3], [2, 3, 8, 8], [2, 3, 3, 9, 9]]
    y_shapes = [[1], [8, 20], [4, 1, 1], [2, 3, 8, 8], [2, 3, 3, 9, 1]]
@@ -799,6 +801,33 @@ def forward(self, inputs):
    verify_model(Pad3D(padding=3, mode="replicate"), input_data=input_data)


@tvm.testing.uses_gpu
def test_forward_reduce():
    class Reduce(nn.Layer):
        def __init__(self, op_name, axis=None, keepdim=False):
            super(Reduce, self).__init__()
            self.op_name = op_name
            self.axis = axis
            self.keepdim = keepdim

        @paddle.jit.to_static
        def forward(self, inputs):
            result = getattr(paddle, self.op_name)(inputs, axis=self.axis, keepdim=self.keepdim)
            result = result.astype("float32")
            return result

    input_shapes = [[1, 2, 2, 5, 5], [2, 3, 4], [4, 20], [2, 3, 30, 30]]
    for input_shape in input_shapes:
        input_data = paddle.uniform(min=-3, max=3, shape=input_shape, dtype="float32")
        verify_model(Reduce("all"), input_data=input_data.astype("bool"))
        verify_model(Reduce("any", 1), input_data=input_data.astype("bool"))
        verify_model(Reduce("max", 0, True), input_data=input_data)
        verify_model(Reduce("min", 1, True), input_data=input_data)
        verify_model(Reduce("prod", 0), input_data=input_data)
        verify_model(Reduce("sum", 0, True), input_data=input_data)
        verify_model(Reduce("mean", -1, True), input_data=input_data)


@tvm.testing.uses_gpu
def test_forward_reshape():
    @paddle.jit.to_static
@@ -899,8 +928,28 @@ def forward(self, inputs):
            return self.func(inputs)

    api_list = [
        "abs",
        "acos",
        "asin",
        "atan",
        "ceil",
        "cos",
        "cosh",
        "erf",
        "exp",
        "floor",
        "log",
        "log2",
        "log10",
        "relu",
        "round",
        "rsqrt",
        "sigmoid",
        "sign",
        "sin",
        "sinh",
        "sqrt",
        "tan",
        "tanh",
    ]
    input_shapes = [[128], [2, 100], [10, 2, 5], [7, 3, 4, 1]]