Add test for BatchNorm running variables synchronization
anko-intel committed Jun 30, 2020
1 parent 2158106 commit 0044dd9
Showing 1 changed file with 28 additions and 0 deletions.
28 changes: 28 additions & 0 deletions tests/python/unittest/test_gluon.py
@@ -665,6 +665,34 @@ def transpose(shape):
assert (layer(x).shape==ceil_out_shape)


@with_seed()
@pytest.mark.parametrize('cudnn_off', [True, False])
@pytest.mark.parametrize('variable', ['running_var', 'running_mean'])
def test_batchnorm_backward_synchronization(cudnn_off, variable):
    """
    Tests if synchronization of BatchNorm running variables is done correctly.
    If not, the test sometimes fails, depending on the timing.
    """
    ctx = mx.cpu() if cudnn_off else mx.gpu()
    read_op = 'layer.' + variable + '.data().asnumpy()'

    for _ in range(20):
        layer = nn.BatchNorm()
        layer.initialize(ctx=ctx)
        for _ in range(3):
            data = mx.nd.random.normal(loc=10, scale=2, shape=(1, 3, 10, 10), ctx=ctx)
            with mx.autograd.record():
                out = layer(data)
            out.backward()

        # check that consecutive reads give the same value
        var1 = eval(read_op)
        for _ in range(10):
            var2 = eval(read_op)
            if (var1 != var2).any():
                raise AssertionError("Two consecutive reads of " + variable + " give different results")


@with_seed()
def test_batchnorm():
layer = nn.BatchNorm(in_channels=10)
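Note (not part of the commit): the test reads the running statistic through an eval string so the attribute name can come from the parametrize decorator. Below is a minimal standalone sketch of the same read pattern, assuming MXNet 1.x with the Gluon API; it uses getattr instead of eval and runs on CPU for portability. The point it illustrates is that asnumpy() is the call that waits for the asynchronous backward work, so consecutive reads should agree once synchronization is correct.

import mxnet as mx
from mxnet.gluon import nn

layer = nn.BatchNorm()
layer.initialize(ctx=mx.cpu())

data = mx.nd.random.normal(loc=10, scale=2, shape=(1, 3, 10, 10))
with mx.autograd.record():
    out = layer(data)
out.backward()

# asnumpy() blocks until pending asynchronous work on the array has finished,
# so two consecutive reads of the running statistic should be identical.
first = getattr(layer, 'running_var').data().asnumpy()
second = getattr(layer, 'running_var').data().asnumpy()
assert (first == second).all()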
