switch to the explicit padding if the input shape is available. (#198)
* switch to the explicit padding if the input shape is available.

* upgrade the version.

* fix for the nchw mode.

* output_padding could be None.
wenbingl authored Dec 13, 2018
1 parent 4b0c0f1 commit c40305a
Showing 3 changed files with 34 additions and 11 deletions.
.gitignore: 2 additions, 1 deletion
@@ -10,7 +10,8 @@
 # setup.py intermediates
 .eggs
 *.egg-info/
-dist
+dist/
+build/
 
 # PyCharm files
 .idea
onnxmltools/__init__.py: 1 addition, 1 deletion
@@ -9,7 +9,7 @@
 This framework converts any machine learned model into onnx format
 which is a common language to describe any machine learned model.
 """
-__version__ = "1.3.0"
+__version__ = "1.3.1"
 __author__ = "Microsoft"
 __producer__ = "OnnxMLTools"
 __producer_version__ = __version__
onnxmltools/convert/keras/operator_converters/Conv.py: 31 additions, 9 deletions
@@ -15,6 +15,23 @@
 from .Dense import _activation_map
 
 
+def _calc_explicit_padding(input_size, output_shape, output_padding, kernel_shape, stride, dilation, perm):
+    to_nchw = lambda x, perm: [x[perm[n_]] for n_ in range(len(x))]
+    input_size = to_nchw(input_size, perm)[2:]
+    output_shape = to_nchw(output_shape, perm)[2:]
+
+    spatial = len(kernel_shape)
+    total_padding = []
+    pads = [None] * 2 * spatial
+    for i in range(spatial):
+        total_padding[i:] = [stride[i] * (output_shape[i] - 1) +
+                             output_padding[i] + kernel_shape[i] * dilation[i] - input_size[i]]
+        pads[i] = total_padding[i] // 2
+        pads[i + spatial] = total_padding[i] - (total_padding[i] // 2)
+
+    return pads
+
+
 def convert_keras_conv_core(scope, operator, container, is_transpose, n_dims, input_perm_axes,
                             output_perm_axes, weight_perm_axes):
     op = operator.raw_operator
@@ -69,22 +86,27 @@ def convert_keras_conv_core(scope, operator, container, is_transpose, n_dims, in
     attrs['dilations'] = list(op.dilation_rate)
     attrs['strides'] = list(op.strides)
     attrs['kernel_shape'] = op.kernel_size
     # Fix this...
     attrs['group'] = group
 
     if op.padding == 'valid':
         attrs['auto_pad'] = 'VALID'
     elif op.padding == 'same':
-        if is_transpose:  # bypass onnx engine issue on convtranpose support.
-            attrs['auto_pad'] = 'SAME_LOWER'
-            shape = [-1 if i is None else i for i in op.output_shape]
-            if channels_first:
-                attrs['output_shape'] = shape
+        if op.input_shape.count(None) > 1:
+            if is_transpose:
+                attrs['auto_pad'] = 'SAME_LOWER'  # the controversial def in onnx spec.
             else:
-                attrs['output_shape'] = shape[0:1] + shape[-1:] + shape[1:-1]
-
+                attrs['auto_pad'] = 'SAME_UPPER'
         else:
-            attrs['auto_pad'] = 'SAME_LOWER'
+            output_padding = [0] * len(op.kernel_size)
+            if hasattr(op, 'output_padding') and op.output_padding is not None:
+                output_padding = op.output_padding
+            attrs['pads'] = _calc_explicit_padding(op.output_shape if is_transpose else op.input_shape,
+                                                   op.input_shape if is_transpose else op.output_shape,
+                                                   output_padding,
+                                                   op.kernel_size,
+                                                   op.strides,
+                                                   op.dilation_rate,
+                                                   list(range(len(op.input_shape))) if channels_first else input_perm_axes)
     else:
         raise RuntimeError("Unsupported padding type '{}'".format(op.padding))

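For reference, a minimal usage sketch (not part of the commit) of what the new _calc_explicit_padding helper returns for a hypothetical channels_last Conv2D with a known 32x32 input, a 3x3 kernel, stride 1, dilation 1 and 'same' padding; all concrete shapes and the perm value below are illustrative assumptions, not values taken from the diff:

# Hypothetical values; shapes are NHWC and perm maps NHWC -> NCHW, mirroring
# how convert_keras_conv_core passes input_perm_axes for channels_last layers.
pads = _calc_explicit_padding((1, 32, 32, 3),   # input_size (NHWC)
                              (1, 32, 32, 8),   # output_shape (NHWC)
                              [0, 0],           # output_padding
                              (3, 3),           # kernel_shape
                              (1, 1),           # stride
                              (1, 1),           # dilation
                              [0, 3, 1, 2])     # perm: NHWC -> NCHW
# Per spatial axis: total = 1*(32-1) + 0 + 3*1 - 32 = 2, split as 1 and 1,
# so pads == [1, 1, 1, 1] in ONNX [h_begin, w_begin, h_end, w_end] order.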

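The condition added in convert_keras_conv_core keys off how much of the Keras layer's input shape is known: when only the batch axis is None, the converter can now compute explicit ONNX 'pads', and it falls back to auto_pad only when spatial dimensions are undefined. A small illustration of that check, assuming standalone Keras 2.x (what onnxmltools converted at the time); the layer sizes are made up:

from keras.layers import Conv2D
from keras.models import Sequential

# Fully specified spatial shape: input_shape is (None, 32, 32, 3), so
# input_shape.count(None) == 1 and the explicit-pads branch is taken.
fixed = Sequential([Conv2D(8, (3, 3), padding='same', input_shape=(32, 32, 3))])
print(fixed.layers[0].input_shape.count(None))    # 1

# Undefined spatial shape: input_shape is (None, None, None, 3), so
# input_shape.count(None) == 3 and the converter keeps using auto_pad.
dynamic = Sequential([Conv2D(8, (3, 3), padding='same', input_shape=(None, None, 3))])
print(dynamic.layers[0].input_shape.count(None))  # 3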