Skip to content

Commit

Permalink
Merge pull request #362 from AndPuQing/add_OP-1
Browse files Browse the repository at this point in the history
【PaddlePaddle Hackathon】在Paddle2ONNX 新增11个 Paddle 2.0 API 支持
  • Loading branch information
jiangjiajun authored Oct 9, 2021
2 parents 539e4ab + a45d76f commit d81df05
Show file tree
Hide file tree
Showing 15 changed files with 1,218 additions and 12 deletions.
46 changes: 34 additions & 12 deletions paddle2onnx/op_mapper/activation.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,8 @@ def opset_9(cls, graph, node, **kw):

slope_node = node.input('Alpha')[0]
if len(input_shape) != len(slope_shape):
assert len(slope_shape) == 1, "Slope shape is not expected for prelu"
assert len(
slope_shape) == 1, "Slope shape is not expected for prelu"
shape_node = graph.make_node('Shape', inputs=node.input('X'))
axes = [i for i in range(len(input_shape))]
del axes[1]
Expand All @@ -102,7 +103,8 @@ def opset_13(cls, graph, node, **kw):

slope_node = node.input('Alpha')[0]
if len(input_shape) != len(slope_shape):
assert len(slope_shape) == 1, "Slope shape is not expected for prelu"
assert len(
slope_shape) == 1, "Slope shape is not expected for prelu"
shape_node = graph.make_node('Shape', inputs=node.input('X'))
value = [i for i in range(len(input_shape))]
del value[1]
Expand All @@ -117,14 +119,14 @@ def opset_13(cls, graph, node, **kw):
inputs=[node.input('X')[0], slope_node],
outputs=node.output('Out'))


@op_mapper('relu6')
class Relu6():
    # NOTE(review): attribute keeps the historical misspelling ("verison")
    # used by the other pre-existing mappers in this file; renaming it here
    # alone could break whatever introspects it — fix project-wide instead.
    support_opset_verison_range = (1, 12)

    @classmethod
    def opset_1(cls, graph, node, **kw):
        """Map Paddle relu6 to a clip of X into [0, threshold].

        relu6(x) = min(max(x, 0), threshold); clip_helper emits the
        opset-appropriate ONNX Clip node writing to node's 'Out'.
        """
        # Reconstructed from a garbled diff hunk that contained both the
        # pre- and post-change call lines (unbalanced parentheses).
        mapper_helper.clip_helper(graph, node.input('X', 0),
                                  node.attr('threshold'), 0.0,
                                  node.output('Out', 0))

Expand All @@ -151,6 +153,20 @@ def opset_7(cls, graph, node, **kw):
'Mul', inputs=[x, zero_point_five], outputs=node.output('Out'))


@op_mapper('selu')
class Selu():
    support_opset_version_range = (6, 12)

    @classmethod
    def opset_6(cls, graph, node, **kw):
        """Map Paddle selu directly onto the ONNX Selu operator.

        Paddle's `scale` attribute corresponds to ONNX Selu's `gamma`.
        """
        graph.make_node(
            'Selu',
            inputs=node.input('X'),
            outputs=node.output('Out'),
            alpha=node.attr('alpha'),
            gamma=node.attr('scale'))


@op_mapper('hard_sigmoid')
class HardSigmoid():
support_opset_verison_range = (1, 12)
Expand All @@ -175,8 +191,10 @@ class Swish():
def opset_7(cls, graph, node, **kw):
beta_node = graph.make_node(
'Constant',
attrs={'dtype': dtypes.ONNX.FLOAT,
'value': [node.attr('beta')]})
attrs={
'dtype': dtypes.ONNX.FLOAT,
'value': [node.attr('beta')]
})
beta_x_node = graph.make_node(
'Mul', inputs=[node.input('X')[0], beta_node])
sigmoid_node = graph.make_node('Sigmoid', inputs=[beta_x_node])
Expand All @@ -194,16 +212,20 @@ class HardSwish():
def opset_7(cls, graph, node, **kw):
scale_node = graph.make_node(
'Constant',
attrs={'dtype': dtypes.ONNX.FLOAT,
'value': node.attr('scale')})
attrs={
'dtype': dtypes.ONNX.FLOAT,
'value': node.attr('scale')
})
offset_node = graph.make_node(
'Constant',
attrs={'dtype': dtypes.ONNX.FLOAT,
'value': node.attr('offset')})
attrs={
'dtype': dtypes.ONNX.FLOAT,
'value': node.attr('offset')
})

node0 = graph.make_node('Add', inputs=[node.input('X')[0], offset_node])
node1 = mapper_helper.clip_helper(graph, node0,
node.attr('threshold'), 0.0)
node1 = mapper_helper.clip_helper(graph, node0, node.attr('threshold'),
0.0)
node2 = graph.make_node('Mul', inputs=[node.input('X')[0], node1])
node3 = graph.make_node(
'Div', inputs=[node2, scale_node], outputs=node.output('Out'))
12 changes: 12 additions & 0 deletions paddle2onnx/op_mapper/logic.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,6 +87,18 @@ def opset_1(cls, graph, node, **kw):
outputs=node.output('Out'))


@op_mapper('logical_xor')
class LogicalXor():
    support_opset_version_range = (7, 12)

    @classmethod
    def opset_7(cls, graph, node, **kw):
        """Map Paddle logical_xor one-to-one onto the ONNX Xor operator."""
        lhs = node.input('X', 0)
        rhs = node.input('Y', 0)
        graph.make_node('Xor', inputs=[lhs, rhs], outputs=node.output('Out'))


@op_mapper('less_equal')
class LessOrEqual():
support_opset_verison_range = (12, )
Expand Down
111 changes: 111 additions & 0 deletions paddle2onnx/op_mapper/math.py
Original file line number Diff line number Diff line change
Expand Up @@ -146,6 +146,16 @@ def opset_9(cls, graph, node, **kw):
'Cosh', inputs=node.input('X'), outputs=node.output('Out'))


@op_mapper('sin')
class Sin():
    # Fixed attribute name: was `supports_opset_version_range` (extra "s"),
    # which does not match the `support_opset_version_range` spelling used
    # by the sibling mappers in this file (Log10, Log1p, Round, Rsqrt, Sign).
    support_opset_version_range = (7, 12)

    @classmethod
    def opset_7(cls, graph, node, **kw):
        """Map Paddle sin directly onto the ONNX Sin operator (opset >= 7)."""
        graph.make_node(
            'Sin', inputs=node.input('X'), outputs=node.output('Out'))


@op_mapper(
[
'elementwise_add',
Expand Down Expand Up @@ -423,6 +433,76 @@ def opset_1(cls, graph, node, **kw):
'Floor', inputs=node.input('X'), outputs=node.output('Out'))


@op_mapper('log10')
class Log10():
    support_opset_version_range = (7, 12)

    @classmethod
    def opset_7(cls, graph, node, **kw):
        """Lower Paddle log10 via the identity log10(x) = ln(x) / ln(10).

        ONNX has no native base-10 logarithm, so build it from Log and Div.
        """
        base = graph.make_node(
            'Constant', attrs={
                'dtype': dtypes.ONNX.FLOAT,
                'value': [10]
            })
        log_base = graph.make_node('Log', inputs=[base])
        log_x = graph.make_node('Log', inputs=node.input('X'))
        graph.make_node(
            'Div', inputs=[log_x, log_base], outputs=node.output('Out'))


@op_mapper('log1p')
class Log1p():
    support_opset_version_range = (7, 12)

    @classmethod
    def opset_7(cls, graph, node, **kw):
        """Lower Paddle log1p as log1p(x) = ln(1 + x).

        Adds a constant one to X, then takes the natural log.
        """
        one_node = graph.make_node(
            'Constant', attrs={
                'dtype': dtypes.ONNX.FLOAT,
                'value': [1]
            })
        shifted = graph.make_node('Add', inputs=[node.input('X', 0), one_node])
        graph.make_node('Log', inputs=shifted, outputs=node.output('Out'))


@op_mapper(['reduce_all', 'reduce_any'],
           mapper_dict={
               'reduce_all': 'ReduceMin',
               'reduce_any': 'ReduceMax'
           })
class ReduceAll():
    # reduce_all ("every element true") -> ReduceMin over a 0/1 cast;
    # reduce_any ("any element true")   -> ReduceMax over a 0/1 cast.
    support_opset_version_range = (6, 12)

    @classmethod
    def opset_6(cls, graph, node, **kw):
        """Lower Paddle reduce_all / reduce_any to ReduceMin / ReduceMax."""
        op_type = kw['mapper_dict'][node.type]

        # Work on a float copy of X; presumably bool inputs are not accepted
        # by ReduceMin/ReduceMax in these opsets — TODO confirm.
        all_node = graph.make_node(
            'Cast', inputs=[node.input('X', 0)], to=dtypes.ONNX.FLOAT)
        if node.attr('reduce_all'):
            # Reduce over every axis: flatten to rank 2, squeeze the
            # size-1 dims, then reduce the remaining 1-D tensor.
            flatten_x = graph.make_node('Flatten', inputs=all_node, axis=0)
            squeeze_node = graph.make_node('Squeeze', inputs=[flatten_x])
            if node.attr('keep_dim'):
                # Re-grow the rank with one Unsqueeze per remaining input
                # axis so the output keeps a size-1 dim per original axis.
                unsqueeze_node = graph.make_node(op_type, inputs=squeeze_node)
                for i in range(len(node.input_shape('X', 0)) - 1):
                    unsqueeze_node = graph.make_node(
                        'Unsqueeze', axes=[0], inputs=[unsqueeze_node])
                # NOTE(review): result is cast to FLOAT although Paddle's
                # reduce_all/reduce_any return bool — BOOL looks intended
                # here; confirm what downstream consumers expect.
                graph.make_node(
                    'Cast',
                    inputs=[unsqueeze_node],
                    to=dtypes.ONNX.FLOAT,
                    outputs=node.output('Out'))
            else:
                graph.make_node(
                    op_type, inputs=squeeze_node, outputs=node.output('Out'))
        else:
            # Partial reduction over the axes listed in the `dim` attribute.
            graph.make_node(
                op_type,
                inputs=all_node,
                keepdims=node.attr('keep_dim'),
                axes=node.attr('dim'),
                outputs=node.output('Out'))


@op_mapper(
['reduce_mean', 'reduce_sum', 'reduce_min', 'reduce_max', 'reduce_prod'],
mapper_dict={
Expand Down Expand Up @@ -526,6 +606,37 @@ def opset_1(cls, graph, node, **kw):
keepdims=0)


@op_mapper('round')
class Round():
    support_opset_version_range = (11, 12)

    @classmethod
    def opset_11(cls, graph, node, **kw):
        """Map Paddle round directly onto ONNX Round (added in opset 11)."""
        graph.make_node(
            'Round',
            inputs=node.input('X'),
            outputs=node.output('Out'))


@op_mapper('rsqrt')
class Rsqrt():
    support_opset_version_range = (6, 12)

    @classmethod
    def opset_6(cls, graph, node, **kw):
        """Lower Paddle rsqrt as rsqrt(x) = 1 / sqrt(x).

        Chains ONNX Sqrt with Reciprocal since there is no native Rsqrt op.
        """
        root = graph.make_node('Sqrt', inputs=node.input('X'))
        graph.make_node('Reciprocal', inputs=root, outputs=node.output('Out'))


@op_mapper('sign')
class Sign():
    support_opset_version_range = (9, 12)

    @classmethod
    def opset_9(cls, graph, node, **kw):
        """Map Paddle sign directly onto the ONNX Sign operator."""
        graph.make_node(
            'Sign',
            inputs=node.input('X'),
            outputs=node.output('Out'))


#
#@op_mapper('scale')
#class Scale():
Expand Down
10 changes: 10 additions & 0 deletions paddle2onnx/op_mapper/nn.py
Original file line number Diff line number Diff line change
Expand Up @@ -211,6 +211,16 @@ def opset_9(cls, graph, node, **kw):
lambd=node.attr('threshold'))


@op_mapper('logsigmoid')
class LogSigmoid():
    support_opset_version_range = (1, 12)

    @classmethod
    def opset_1(cls, graph, node, **kw):
        """Lower Paddle logsigmoid as log(sigmoid(x)).

        Composes the ONNX Sigmoid and Log operators.
        """
        sig = graph.make_node('Sigmoid', inputs=node.input('X'))
        graph.make_node('Log', inputs=sig, outputs=node.output('Out'))


@op_mapper('norm')
class Norm():
support_opset_verison_range = (1, 12)
Expand Down
125 changes: 125 additions & 0 deletions tests/test_all.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,125 @@
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import paddle
from onnxbase import APIOnnx
from onnxbase import randtool


class Net(paddle.nn.Layer):
    """
    Simple wrapper network around paddle.all for ONNX export tests.
    """

    def __init__(self, axis=None, keepdim=False):
        super(Net, self).__init__()
        # axis: reduction axis (None reduces over all elements);
        # keepdim: whether reduced dimensions are kept with size 1.
        self.axis = axis
        self.keepdim = keepdim

    def forward(self, inputs):
        """
        forward: reduce with paddle.all, then cast the bool result to
        float32 so the exported graph yields a numeric, comparable output.
        """
        x = paddle.all(inputs, axis=self.axis, keepdim=self.keepdim)
        return x.astype('float32')


def test_all_10():
    """
    api: paddle.all
    op version: 10
    """
    net = Net()
    net.eval()
    # APIOnnx signature: net, name, ver_list, delta=1e-6, rtol=1e-5
    checker = APIOnnx(net, 'all', [10])
    data = randtool("float", -1, 1, [3, 10]).astype('bool')
    checker.set_input_data("input_data", paddle.to_tensor(data))
    checker.run()


def test_all_11():
    """
    api: paddle.all
    op version: 11
    """
    net = Net()
    net.eval()
    # APIOnnx signature: net, name, ver_list, delta=1e-6, rtol=1e-5
    checker = APIOnnx(net, 'all', [11])
    data = randtool("float", -1, 1, [3, 10]).astype('bool')
    checker.set_input_data("input_data", paddle.to_tensor(data))
    checker.run()


def test_all_12():
    """
    api: paddle.all
    op version: 12
    """
    net = Net()
    net.eval()
    # APIOnnx signature: net, name, ver_list, delta=1e-6, rtol=1e-5
    checker = APIOnnx(net, 'all', [12])
    data = randtool("float", -1, 1, [3, 10]).astype('bool')
    checker.set_input_data("input_data", paddle.to_tensor(data))
    checker.run()


def test_all_keepdim():
    """
    api: paddle.all
    op version: 12
    """
    net = Net(keepdim=True)
    net.eval()
    # APIOnnx signature: net, name, ver_list, delta=1e-6, rtol=1e-5
    checker = APIOnnx(net, 'all', [12])
    data = randtool("float", -1, 1, [4, 3, 10]).astype('bool')
    checker.set_input_data("input_data", paddle.to_tensor(data))
    checker.run()


def test_all_axis():
    """
    api: paddle.all
    op version: 12
    """
    net = Net(axis=1)
    net.eval()
    # APIOnnx signature: net, name, ver_list, delta=1e-6, rtol=1e-5
    checker = APIOnnx(net, 'all', [12])
    data = randtool("float", -1, 1, [4, 3, 10]).astype('bool')
    checker.set_input_data("input_data", paddle.to_tensor(data))
    checker.run()


def test_all_axis_keepdim():
    """
    api: paddle.all
    op version: 12
    """
    net = Net(axis=1, keepdim=True)
    net.eval()
    # APIOnnx signature: net, name, ver_list, delta=1e-6, rtol=1e-5
    checker = APIOnnx(net, 'all', [12])
    data = randtool("float", -1, 1, [4, 3, 10]).astype('bool')
    checker.set_input_data("input_data", paddle.to_tensor(data))
    checker.run()
Loading

0 comments on commit d81df05

Please sign in to comment.