From 937c9630d4870acf10e643da2057071c88353928 Mon Sep 17 00:00:00 2001
From: Yuwei Hu
Date: Sun, 6 Jan 2019 17:56:12 +0800
Subject: [PATCH 1/2] [Relay][Keras] force const dtype to be float32

---
 python/tvm/relay/frontend/keras.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/python/tvm/relay/frontend/keras.py b/python/tvm/relay/frontend/keras.py
index 0c55c7595002..f2b163ed0214 100644
--- a/python/tvm/relay/frontend/keras.py
+++ b/python/tvm/relay/frontend/keras.py
@@ -28,7 +28,7 @@ def _get_pad_pair(input1d, kernel1d, stride1d):
 
 def _get_elu(inexpr, alpha):
     """A helper method for elu."""
-    return _op.negative(alpha) * _op.nn.relu(_expr.const(1.) - \
+    return _op.negative(alpha) * _op.nn.relu(_expr.const(1., dtype='float32') - \
         _op.exp(inexpr)) + _op.nn.relu(inexpr)
 
 
@@ -69,7 +69,7 @@ def _convert_activation(inexpr, keras_layer, _):
     elif act_type == 'relu':
         return _op.nn.relu(inexpr)
     elif act_type == 'softplus':
-        return _op.log(_op.add(_op.exp(inexpr), _expr.const(1.)))
+        return _op.log(_op.add(_op.exp(inexpr), _expr.const(1., dtype='float32')))
     elif act_type == 'elu':
         alpha = keras_layer.alpha if hasattr(keras_layer, 'alpha') else 1.
         alpha = _expr.const(alpha, dtype='float32')
@@ -86,9 +86,9 @@ def _convert_activation(inexpr, keras_layer, _):
     elif act_type == 'relu6':
         return _op.clip(inexpr, a_min=0., a_max=6.)
     elif act_type == 'softsign':
-        return inexpr / (_expr.const(1.) + _op.abs(inexpr))
+        return inexpr / (_expr.const(1., dtype='float32') + _op.abs(inexpr))
     elif act_type == 'hard_sigmoid':
-        transformX = (_expr.const(0.2) * inexpr) + _expr.const(0.5)
+        transformX = (_expr.const(0.2, dtype='float32') * inexpr) + _expr.const(0.5, dtype='float32')
         return _op.clip(transformX, a_min=0., a_max=1.)
     else:
         raise TypeError("Unsupported activation type : {}".format(act_type))
@@ -522,7 +522,7 @@ def _convert_gru(inexpr, keras_layer, etab):
     recurrent_h = _op.nn.dense(rec_act_r * h_tm1_op, rec_weights[1], units=units)
     act_hh = _convert_activation(x_h + recurrent_h, keras_layer, None)
     # previous and candidate state mixed by update gate
-    output = rec_act_z * h_tm1_op + (_expr.const(1.) - rec_act_z) * act_hh
+    output = rec_act_z * h_tm1_op + (_expr.const(1., dtype='float32') - rec_act_z) * act_hh
     out_shape = tuple(dim if dim else 1 for dim in _as_list(keras_layer.output_shape)[0])
     output = _op.reshape(output, newshape=out_shape)
     return [output, output]

From 210a2140104e2d76d8402bd6032358df337045f1 Mon Sep 17 00:00:00 2001
From: Yuwei Hu
Date: Sun, 6 Jan 2019 18:04:21 +0800
Subject: [PATCH 2/2] fix pylint

---
 python/tvm/relay/frontend/keras.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/python/tvm/relay/frontend/keras.py b/python/tvm/relay/frontend/keras.py
index f2b163ed0214..868b46b74d4f 100644
--- a/python/tvm/relay/frontend/keras.py
+++ b/python/tvm/relay/frontend/keras.py
@@ -88,8 +88,8 @@ def _convert_activation(inexpr, keras_layer, _):
     elif act_type == 'softsign':
         return inexpr / (_expr.const(1., dtype='float32') + _op.abs(inexpr))
     elif act_type == 'hard_sigmoid':
-        transformX = (_expr.const(0.2, dtype='float32') * inexpr) + _expr.const(0.5, dtype='float32')
-        return _op.clip(transformX, a_min=0., a_max=1.)
+        x = (_expr.const(0.2, dtype='float32') * inexpr) + _expr.const(0.5, dtype='float32')
+        return _op.clip(x, a_min=0., a_max=1.)
     else:
         raise TypeError("Unsupported activation type : {}".format(act_type))
 
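
For reviewers who want to exercise the changed paths locally, below is a minimal sketch (not part of the patch), assuming a standalone keras install and a TVM build that includes the Relay Keras frontend; the model, layer names, and shape dict are illustrative only, and the exact return type of from_keras depends on the TVM version.

# Sketch only, not part of the patch: exercise the softsign / hard_sigmoid
# conversion paths whose constants are now pinned to float32.
import keras
from tvm import relay

# A tiny model that routes through _convert_activation for both activations.
inp = keras.layers.Input(shape=(4,), name='data')
hidden = keras.layers.Dense(8, activation='softsign')(inp)
out = keras.layers.Dense(2, activation='hard_sigmoid')(hidden)
model = keras.models.Model(inp, out)

# Keras weights are float32 by default; before this change the bare
# _expr.const(...) calls could pick up a float64 default and trip type
# inference with a float32/float64 mismatch during conversion.
shape_dict = {'data': (1, 4)}
# Depending on the TVM version this returns a Relay Function or an IRModule,
# plus the converted parameters.
mod, params = relay.frontend.from_keras(model, shape_dict)
print(mod)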