Skip to content

Commit

Permalink
[Feature] Register a new activation layer SiLU to ACTIVATION_LAYERS (
Browse files Browse the repository at this point in the history
  • Loading branch information
okotaku authored Sep 13, 2022
1 parent 2046a39 commit 55c51e1
Show file tree
Hide file tree
Showing 2 changed files with 24 additions and 0 deletions.
3 changes: 3 additions & 0 deletions mmcv/cnn/bricks/activation.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,9 @@
]:
ACTIVATION_LAYERS.register_module(module=module)

# nn.SiLU only exists in PyTorch >= 1.7.0, so gate the registration on the
# installed version instead of registering it unconditionally with the
# activations above.
if digit_version(torch.__version__) >= digit_version('1.7.0'):
    ACTIVATION_LAYERS.register_module(module=nn.SiLU)


@ACTIVATION_LAYERS.register_module(name='Clip')
@ACTIVATION_LAYERS.register_module()
Expand Down
21 changes: 21 additions & 0 deletions tests/test_cnn/test_silu.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
# Copyright (c) OpenMMLab. All rights reserved.
import pytest
import torch
import torch.nn.functional as F

from mmcv.cnn.bricks import build_activation_layer
from mmcv.utils import digit_version


@pytest.mark.skipif(
    digit_version(torch.__version__) < digit_version('1.7.0'),
    reason='torch.nn.SiLU is not available before 1.7.0')
def test_silu():
    """Check that building ``dict(type='SiLU')`` yields a layer whose output
    matches ``F.silu`` exactly."""
    layer = build_activation_layer(dict(type='SiLU'))
    x = torch.randn(1, 3, 64, 64)
    reference = F.silu(x)
    result = layer(x)
    # test output shape
    assert result.shape == reference.shape
    # test output value
    assert torch.equal(result, reference)

0 comments on commit 55c51e1

Please sign in to comment.