[Relay] Conv2D padding representation (apache#4787)
* enforce 4-way padding

* add util with get_pad_tuple

* delete unnecessary arguments

* fix lint

* add container.Array case

* fix cudnn conv2d asymmetric padding logic

* rename get_pad_tuple to get_pad_tuple2d

* revert change for topi/python/topi/nn/conv2d.py

* add get_pad_tuple2d for several contrib conv2d ops

* add get_pad_tuple2d for all conv2d ops
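
In short, these ops previously accepted padding as a single int or a (pad_h, pad_w) pair; with this commit the Python frontends normalize it to an explicit 4-way (pad_top, pad_left, pad_down, pad_right) tuple before the attributes reach the C++ side. A minimal sketch of the two representations (values are illustrative only):

```python
padding_2way = (1, 2)        # (pad_h, pad_w): the same amount applied to both sides of each axis
padding_4way = (1, 2, 1, 2)  # (pad_top, pad_left, pad_down, pad_right): each side explicit
```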
Xingyu Zhou authored and zhiics committed Mar 2, 2020
1 parent ca92d17 commit f7c2fd6
Showing 5 changed files with 90 additions and 16 deletions.
23 changes: 20 additions & 3 deletions python/tvm/relay/op/nn/nn.py
@@ -19,6 +19,7 @@
from __future__ import absolute_import as _abs
from ...expr import TupleWrapper
from . import _make
from .util import get_pad_tuple2d


def conv1d(data,
@@ -200,8 +201,9 @@ def conv2d(data,
strides = (strides, strides)
if isinstance(dilation, int):
dilation = (dilation, dilation)
-if isinstance(padding, int):
-    padding = (padding, padding)
+# TODO enforce 4-way padding in topi/nn/conv2d after #4644 merged
+# convert 2-way padding to 4-way padding
+padding = get_pad_tuple2d(padding)

return _make.conv2d(data, weight, strides, padding, dilation,
groups, channels, kernel_size, data_layout,
@@ -363,6 +365,8 @@ def conv2d_transpose(data,
result : tvm.relay.Expr
The computed result.
"""
# convert 2-way padding to 4-way padding
padding = get_pad_tuple2d(padding)
return _make.conv2d_transpose(data, weight, strides, padding, dilation,
groups, channels, kernel_size, data_layout,
kernel_layout, out_layout, output_padding, out_dtype)
@@ -1758,6 +1762,8 @@ def contrib_conv2d_winograd_without_weight_transform(data,
result : tvm.relay.Expr
The computed result.
"""
# convert 2-way padding to 4-way padding
padding = get_pad_tuple2d(padding)
return _make.contrib_conv2d_winograd_without_weight_transform(
data, weight, tile_size, strides, padding, dilation,
groups, channels, kernel_size, data_layout,
@@ -1824,6 +1830,8 @@ def contrib_conv2d_winograd_nnpack_without_weight_transform(data,
result : tvm.relay.Expr
The computed result.
"""
# convert 2-way padding to 4-way padding
padding = get_pad_tuple2d(padding)
return _make.contrib_conv2d_winograd_nnpack_without_weight_transform(
data, weight, strides, padding, dilation,
groups, channels, kernel_size, data_layout,
@@ -1891,6 +1899,8 @@ def contrib_conv2d_nchwc(data,
result : tvm.relay.Expr
The computed result.
"""
# convert 2-way padding to 4-way padding
padding = get_pad_tuple2d(padding)
return _make.contrib_conv2d_NCHWc(data, kernel, strides, padding, dilation,
groups, channels, kernel_size, data_layout,
kernel_layout, out_layout, out_dtype)
@@ -1956,6 +1966,8 @@ def contrib_depthwise_conv2d_nchwc(data,
result : tvm.relay.Expr
The computed result.
"""
# convert 2-way padding to 4-way padding
padding = get_pad_tuple2d(padding)
return _make.contrib_depthwise_conv2d_NCHWc(data, kernel, strides, padding, dilation,
groups, channels, kernel_size, data_layout,
kernel_layout, out_layout, out_dtype)
@@ -2021,6 +2033,8 @@ def contrib_conv2d_nchwc_int8(data,
result : tvm.relay.Expr
The computed result.
"""
# convert 2-way padding to 4-way padding
padding = get_pad_tuple2d(padding)
return _make.contrib_conv2d_NCHWc_int8(data, kernel, strides, padding, dilation,
groups, channels, kernel_size, data_layout,
kernel_layout, out_layout, out_dtype)
@@ -2142,6 +2156,8 @@ def deformable_conv2d(data,
The computed result.
"""
# convert 2-way padding to 4-way padding
padding = get_pad_tuple2d(padding)
return _make.deformable_conv2d(data, offset, weight, strides, padding, dilation,
deformable_groups, groups, channels, kernel_size, data_layout,
kernel_layout, out_layout, out_dtype)
@@ -2251,7 +2267,8 @@ def bitserial_conv2d(data,
result : tvm.relay.Expr
The computed result.
"""

# convert 2-way padding to 4-way padding
padding = get_pad_tuple2d(padding)
return _make.bitserial_conv2d(data, weight, strides, padding, channels,
kernel_size, activation_bits, weight_bits,
data_layout, kernel_layout, pack_dtype,
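
With these changes, the Relay Python API accepts any of the three padding forms and converts them before the op attributes are built. A small usage sketch (shapes and attribute values here are illustrative, not taken from this commit):

```python
from tvm import relay

data = relay.var("data", shape=(1, 3, 32, 32))
weight = relay.var("weight", shape=(16, 3, 3, 3))

# All three calls should end up with the same 4-way padding attribute.
out_int  = relay.nn.conv2d(data, weight, padding=1,            channels=16, kernel_size=(3, 3))
out_2way = relay.nn.conv2d(data, weight, padding=(1, 1),       channels=16, kernel_size=(3, 3))
out_4way = relay.nn.conv2d(data, weight, padding=(1, 1, 1, 1), channels=16, kernel_size=(3, 3))
```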
56 changes: 56 additions & 0 deletions python/tvm/relay/op/nn/util.py
@@ -0,0 +1,56 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name, unused-variable
"""NN operator common utilities"""
from __future__ import absolute_import
from .... import container

def get_pad_tuple2d(padding):
"""Common code to get the pad option
Parameters
----------
padding : Union[int, Tuple[int, ...]]
Padding size
Returns
-------
pad_top : int
Padding size on top
pad_left : int
Padding size on left
pad_down : int
Padding size on bottom.
pad_right : int
Padding size on right.
"""
# compute the padding size
if isinstance(padding, container.Array):
padding = list(padding)
if isinstance(padding, (tuple, list)):
if len(padding) == 2:
pad_h = padding[0] * 2
pad_w = padding[1] * 2
elif len(padding) == 4:
return padding[0], padding[1], padding[2], padding[3]
else:
raise ValueError("Size of padding can only be 2 or 4")
elif isinstance(padding, int):
pad_h = pad_w = padding * 2
else:
raise ValueError("Unknown padding option %s" % padding)
pad_top = (pad_h + 1) // 2
pad_left = (pad_w + 1) // 2
return pad_top, pad_left, pad_h - pad_top, pad_w - pad_left
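
For reference, a small sketch of how the helper behaves for the three accepted input forms, following the logic above (the inputs are hypothetical):

```python
from tvm.relay.op.nn.util import get_pad_tuple2d

assert get_pad_tuple2d(1) == (1, 1, 1, 1)             # int: same padding on all four sides
assert get_pad_tuple2d((1, 2)) == (1, 2, 1, 2)        # (pad_h, pad_w) -> (top, left, down, right)
assert get_pad_tuple2d((1, 2, 3, 4)) == (1, 2, 3, 4)  # already 4-way: passed through unchanged
```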
2 changes: 1 addition & 1 deletion tests/python/relay/test_pass_alter_op_layout.py
@@ -617,7 +617,7 @@ def expected():
w = relay.var("w", shape=(32, 1, 3, 3))
x = relay.layout_transform(x, "NCHW", "NCHW8c")
w = relay.layout_transform(w, "OIHW", "OIHW1i8o")
-y = relay.nn.contrib_depthwise_conv2d_nchwc(x, w, padding=(1, 1), channels=32, kernel_size=(3, 3),
+y = relay.nn.contrib_depthwise_conv2d_nchwc(x, w, padding=(1, 1, 1, 1), channels=32, kernel_size=(3, 3),
groups=32, data_layout="NCHW8c", kernel_layout="OIHW1i8o",
out_layout="NCHW8c")
y = relay.layout_transform(y, "NCHW8c", "NCHW")
22 changes: 11 additions & 11 deletions tests/python/unittest/test_graph_tuner_core.py
@@ -50,9 +50,9 @@ def _create_data(target, dshape, dtype, layout):
params=params,
ops=(relay.op.nn.conv2d,))
wkl_list = [
-create_workload((1, 3, 8, 8), (16, 3, 3, 3), (1, 1), (1, 1), (1, 1), layout, layout, dtype, dtype),
-create_workload((1, 16, 8, 8), (32, 16, 1, 1), (1, 1), (0, 0), (1, 1), layout, layout, dtype, dtype),
-create_workload((1, 32, 8, 8), (32, 32, 3, 3), (1, 1), (1, 1), (1, 1), layout, layout, dtype, dtype),
+create_workload((1, 3, 8, 8), (16, 3, 3, 3), (1, 1), (1, 1, 1, 1), (1, 1), layout, layout, dtype, dtype),
+create_workload((1, 16, 8, 8), (32, 16, 1, 1), (1, 1), (0, 0, 0, 0), (1, 1), layout, layout, dtype, dtype),
+create_workload((1, 32, 8, 8), (32, 32, 3, 3), (1, 1), (1, 1, 1, 1), (1, 1), layout, layout, dtype, dtype),
]
costs = [0.04, 0.012, 0.03]
config_list = []
@@ -279,9 +279,9 @@ def test_many_sub_graphs():
params=params,
ops=(relay.op.nn.conv2d,))
wkl_list = [
-create_workload((1, 3, 8, 8), (16, 3, 3, 3), (1, 1), (1, 1), (1, 1), layout, layout, dtype, dtype),
-create_workload((1, 16, 8, 8), (32, 16, 1, 1), (1, 1), (0, 0), (1, 1), layout, layout, dtype, dtype),
-create_workload((1, 32, 8, 8), (32, 32, 3, 3), (1, 1), (1, 1), (1, 1), layout, layout, dtype, dtype),
+create_workload((1, 3, 8, 8), (16, 3, 3, 3), (1, 1), (1, 1, 1, 1), (1, 1), layout, layout, dtype, dtype),
+create_workload((1, 16, 8, 8), (32, 16, 1, 1), (1, 1), (0, 0, 0, 0), (1, 1), layout, layout, dtype, dtype),
+create_workload((1, 32, 8, 8), (32, 32, 3, 3), (1, 1), (1, 1, 1, 1), (1, 1), layout, layout, dtype, dtype),
]
costs = [0.04, 0.012, 0.03, 0.02, 0.02, 0.045]
config_list = []
@@ -392,8 +392,8 @@ def test_tuple():
params=params,
ops=(relay.op.nn.conv2d,))
wkl_list = [
-create_workload((1, 5, 32, 32), (2, 5, 3, 3), (1, 1), (1, 1), (1, 1), layout, layout, dtype, dtype),
-create_workload((1, 5, 32, 32), (3, 5, 3, 3), (1, 1), (1, 1), (1, 1), layout, layout, dtype, dtype),
+create_workload((1, 5, 32, 32), (2, 5, 3, 3), (1, 1), (1, 1, 1, 1), (1, 1), layout, layout, dtype, dtype),
+create_workload((1, 5, 32, 32), (3, 5, 3, 3), (1, 1), (1, 1, 1, 1), (1, 1), layout, layout, dtype, dtype),
]
costs = [0.01, 0.012, 0.03, 0.04]
config_list = []
@@ -490,9 +490,9 @@ def test_triangle_block():
params=params,
ops=(relay.op.nn.conv2d,))
wkl_list = [
-create_workload((1, 3, 8, 8), (16, 3, 3, 3), (1, 1), (1, 1), (1, 1), layout, layout, dtype, dtype),
-create_workload((1, 16, 8, 8), (32, 16, 1, 1), (1, 1), (0, 0), (1, 1), layout, layout, dtype, dtype),
-create_workload((1, 3, 8, 8), (32, 3, 3, 3), (1, 1), (1, 1), (1, 1), layout, layout, dtype, dtype),
+create_workload((1, 3, 8, 8), (16, 3, 3, 3), (1, 1), (1, 1, 1, 1), (1, 1), layout, layout, dtype, dtype),
+create_workload((1, 16, 8, 8), (32, 16, 1, 1), (1, 1), (0, 0, 0, 0), (1, 1), layout, layout, dtype, dtype),
+create_workload((1, 3, 8, 8), (32, 3, 3, 3), (1, 1), (1, 1, 1, 1), (1, 1), layout, layout, dtype, dtype),
]
costs = [0.04, 0.012, 0.03, 0.02, 0.02, 0.045]
config_list = []
3 changes: 2 additions & 1 deletion topi/python/topi/cuda/conv2d.py
@@ -85,7 +85,8 @@ def conv2d_cuda(cfg, data, kernel, strides, padding, dilation, layout='NCHW', ou
stride_h, stride_w = (strides, strides) if isinstance(strides, int) else strides
dilation_h, dilation_w = (dilation, dilation) if isinstance(dilation, int) else dilation

-if isinstance(padding, (list, tuple)) and len(padding) > 2:
+if isinstance(padding, (list, tuple)) and len(padding) == 4 and \
+        (padding[0] != padding[2] or padding[1] != padding[3]):
raise ValueError("Cudnn doesn't support asymmetric padding.")
pt, pl, pb, pr = get_pad_tuple(padding, (KH, KW))
OH = (H + pt + pb - KH) // stride_h + 1
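
To make the new guard explicit: with 4-way padding, the cuDNN path can only be taken when the top/bottom and left/right amounts match. A hypothetical helper (not part of this commit) expressing the same condition:

```python
def cudnn_padding_is_symmetric(padding):
    # Mirrors the check above: 4-way padding must have equal top/bottom and left/right sizes.
    if isinstance(padding, (list, tuple)) and len(padding) == 4:
        pad_top, pad_left, pad_down, pad_right = padding
        return pad_top == pad_down and pad_left == pad_right
    return True  # an int or a (pad_h, pad_w) pair is symmetric by construction
```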
