This repository has been archived by the owner on Nov 17, 2023. It is now read-only.

test coverage for leakyrelu elemwise_add concat activation #19687

Merged · 6 commits · Dec 18, 2020
2 changes: 1 addition & 1 deletion python/mxnet/contrib/onnx/mx2onnx/_op_translations.py
@@ -829,7 +829,7 @@ def convert_leakyrelu(node, **kwargs):
            create_const_scalar_node(name+"_half", np.float32(0.5), kwargs),
            make_node("Add", [name+"_erf0_out", name+"_one"], [name+"_add0_out"]),
            make_node("Mul", [input_nodes[0], name+"_add0_out"], [name+"_mul0_out"]),
-           make_node("Mul", [name+"_mul0_out", name+"_half"], [name])
+           make_node("Mul", [name+"_mul0_out", name+"_half"], [name], name=name)
        ]
        return nodes
    else:
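The functional change in this file is the added name=name argument on the final Mul node, so the node that produces the gelu output carries an explicit name in the exported graph. A minimal sketch of the effect, using onnx.helper.make_node (the tensor names below are illustrative, not from the converter):

from onnx import helper

# Without an explicit name, the node's name field is left empty.
unnamed = helper.make_node("Mul", ["mul0_out", "half"], ["gelu0"])
# With name=..., the node can later be located by name in the graph.
named = helper.make_node("Mul", ["mul0_out", "half"], ["gelu0"], name="gelu0")

print(unnamed.name)  # ""
print(named.name)    # "gelu0"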
40 changes: 40 additions & 0 deletions tests/python-pytest/onnx/test_operators.py
@@ -124,6 +124,7 @@ def test_onnx_export_broadcast_axis(tmp_path, dtype):
    op_export_test('broadcast_axis_3', M3, [x2], tmp_path)


# TODO: onnxruntime does not support float64 for Where
@pytest.mark.parametrize('dtype', ['float32'])
def test_onnx_export_SequenceMask(tmp_path, dtype):
    M1 = def_model('SequenceMask', use_sequence_length=True, axis=1, value=-5)
@@ -207,6 +208,45 @@ def test_onnx_export_fully_connected(tmp_path, dtype, num_hidden, no_bias, flatten):
    op_export_test('FullyConnected', M, args, tmp_path)


# TODO: onnxruntime does not support float64 for the relu operators
@pytest.mark.parametrize('dtype', ['float32', 'float16'])
@pytest.mark.parametrize('shape', [(1,), (3,), (4, 5), (3, 4, 5)])
@pytest.mark.parametrize('act_type', ['elu', 'leaky', 'prelu', 'selu', 'gelu'])
def test_onnx_export_LeakyReLU(tmp_path, dtype, shape, act_type):
    M = def_model('LeakyReLU', act_type=act_type)
    x = mx.nd.random.uniform(-0.5, 0.5, shape=shape, dtype=dtype)
    op_export_test('LeakyReLU', M, [x], tmp_path)
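
MXNet's LeakyReLU covers several activations behind one operator (elu, leaky, prelu, selu, gelu). A quick illustrative sketch, not part of the test file, of what act_type='leaky' computes — MXNet's default slope is 0.25:

import mxnet as mx
import numpy as np

x = mx.nd.array([-2.0, -0.5, 0.0, 1.5])
# leaky: y = x for x > 0, slope * x otherwise
y = mx.nd.LeakyReLU(x, act_type='leaky', slope=0.25)
ref = np.where(x.asnumpy() > 0, x.asnumpy(), 0.25 * x.asnumpy())
assert np.allclose(y.asnumpy(), ref)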


@pytest.mark.parametrize('dtype', ['float32', 'float64', 'float16', 'int32', 'int64'])
def test_onnx_export_Concat(tmp_path, dtype):
    x = mx.nd.array([[1,1],[2,2]], dtype=dtype)
    y = mx.nd.array([[3,3],[4,4],[5,5]], dtype=dtype)
    z = mx.nd.array([[6,6],[7,7],[8,8]], dtype=dtype)
    M1 = def_model('Concat', dim=0)
    M2 = def_model('Concat', dim=1)
    op_export_test('Concat_1', M1, [x, y, z], tmp_path)
    op_export_test('Concat_2', M2, [y, z], tmp_path)
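
The shapes are chosen so concatenation is valid along each tested axis: x is (2, 2) while y and z are (3, 2), so all three align along dim=0, but only y and z share the row count needed for dim=1. An illustrative sketch:

import mxnet as mx

x = mx.nd.array([[1, 1], [2, 2]])          # shape (2, 2)
y = mx.nd.array([[3, 3], [4, 4], [5, 5]])  # shape (3, 2)

mx.nd.Concat(x, y, dim=0)  # OK: widths match, result is (5, 2)
mx.nd.Concat(y, y, dim=1)  # OK: row counts match, result is (3, 4)
# mx.nd.Concat(x, y, dim=1) would fail: 2 rows vs. 3 rows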


@pytest.mark.parametrize('dtype', ['float32', 'float64', 'float16'])
@pytest.mark.parametrize('shape', [(1,), (3,), (4, 5), (3, 4, 5)])
def test_onnx_export_elemwise_add(tmp_path, dtype, shape):
    M = def_model('elemwise_add')
    x = mx.nd.random.uniform(-0.5, 0.5, shape=shape, dtype=dtype)
    y = mx.nd.random.uniform(-0.5, 0.5, shape=shape, dtype=dtype)
    op_export_test('elemwise_add', M, [x, y], tmp_path)
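
Both inputs get the same shape because elemwise_add requires exactly matching shapes; broadcasting is handled by the separate broadcast_* operators. An illustrative sketch:

import mxnet as mx

a = mx.nd.ones((2, 3))
b = mx.nd.ones((2, 3))
mx.nd.elemwise_add(a, b)   # OK: shapes match exactly

c = mx.nd.ones((1, 3))
mx.nd.broadcast_add(a, c)  # broadcasting needs the broadcast_* variant
# mx.nd.elemwise_add(a, c) would raise a shape-inference error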


@pytest.mark.parametrize('dtype', ['float32', 'float16'])
@pytest.mark.parametrize('shape', [(1,), (3,), (4, 5), (3, 4, 5)])
@pytest.mark.parametrize('act_type', ['tanh', 'relu', 'sigmoid', 'softrelu', 'softsign'])
def test_onnx_export_Activation(tmp_path, dtype, shape, act_type):
    M = def_model('Activation', act_type=act_type)
    x = mx.nd.random.uniform(-0.5, 0.5, shape=shape, dtype=dtype)
    op_export_test('Activation', M, [x], tmp_path)
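
For reference, the two less common act_type values here compute softplus and softsign; a NumPy cross-check (illustrative sketch, not part of the test file):

import mxnet as mx
import numpy as np

x = mx.nd.array([-1.0, 0.0, 2.0])
xs = x.asnumpy()

sr = mx.nd.Activation(x, act_type='softrelu')  # softplus: log(1 + exp(x))
ss = mx.nd.Activation(x, act_type='softsign')  # x / (1 + |x|)

assert np.allclose(sr.asnumpy(), np.log1p(np.exp(xs)), atol=1e-6)
assert np.allclose(ss.asnumpy(), xs / (1 + np.abs(xs)), atol=1e-6)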


@pytest.mark.parametrize('dtype', ['float32', 'float64', 'int32', 'int64'])
@pytest.mark.parametrize('axes', [None, [1,0,2]])
def test_onnx_export_transpose(tmp_path, dtype, axes):