From 6a8098731d6316bdc63c3b64753d7848d2fbc06a Mon Sep 17 00:00:00 2001
From: Sheng Zha
Date: Thu, 2 Aug 2018 13:31:53 -0700
Subject: [PATCH] remove hybridize

---
 tests/python/unittest/test_gluon_rnn.py | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/tests/python/unittest/test_gluon_rnn.py b/tests/python/unittest/test_gluon_rnn.py
index 15f85003276f..99a4f1a82480 100644
--- a/tests/python/unittest/test_gluon_rnn.py
+++ b/tests/python/unittest/test_gluon_rnn.py
@@ -402,7 +402,6 @@ def test_rnn_layers():
     net.add(gluon.nn.BatchNorm(axis=2))
     net.add(gluon.nn.Flatten())
     net.add(gluon.nn.Dense(3, activation='relu'))
-    net.hybridize()
     net.collect_params().initialize()
     with mx.autograd.record():
         net(mx.nd.ones((2, 3, 10))).backward()
@@ -412,7 +411,6 @@ def test_rnn_layers():
     net2.add(gluon.nn.BatchNorm(axis=2))
     net2.add(gluon.nn.Flatten())
     net2.add(gluon.nn.Dense(3, activation='relu'))
-    net2.hybridize()
     net2.collect_params().initialize()
     with mx.autograd.record():
         net2(mx.nd.ones((2, 3, 10))).backward()
@@ -502,7 +500,6 @@ def test_cell_fill_shape():
 @assert_raises_cudnn_disabled()
 def test_layer_fill_shape():
     layer = gluon.rnn.LSTM(10)
-    layer.hybridize()
     check_rnn_layer_forward(layer, mx.nd.ones((3, 2, 7)))
     print(layer)
     assert layer.l0_i2h_weight.shape[1] == 7, layer.l0_i2h_weight.shape[1]
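
For context, below is a minimal sketch of the kind of network the first hunk of
test_rnn_layers() builds and the hybridize() call this patch removes. The first two
lines (the Sequential container and the LSTM layer) are assumptions, since the hunk
does not show how `net` is constructed; everything else mirrors the context lines of
the diff and the MXNet 1.x Gluon API.

    # Illustrative sketch only -- not part of the patch.
    import mxnet as mx
    from mxnet import gluon

    net = gluon.nn.Sequential()          # assumed: container not shown in the hunk
    net.add(gluon.rnn.LSTM(10))          # assumed: exact LSTM arguments not shown
    net.add(gluon.nn.BatchNorm(axis=2))
    net.add(gluon.nn.Flatten())
    net.add(gluon.nn.Dense(3, activation='relu'))

    # The line removed by this patch. In Gluon, hybridize() asks blocks that
    # support it to switch from imperative execution to a cached symbolic graph.
    # net.hybridize()

    net.collect_params().initialize()
    with mx.autograd.record():
        # Input is (seq_len, batch, input_size) under the default TNC layout.
        net(mx.nd.ones((2, 3, 10))).backward()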