Skip to content

Commit a39c3ee

Browse files
authored
Merge branch 'master' into eca-weights
2 parents e9d6fe2 + 3a7aa95 commit a39c3ee

File tree

2 files changed

+11
-1
lines changed

2 files changed

+11
-1
lines changed

README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@
1010
* Remove separate tiered (`t`) vs tiered_narrow (`tn`) ResNet model defs, all `tn` changed to `t` and `t` models removed (`seresnext26t_32x4d` only model w/ weights that was removed).
1111
* Support model default_cfgs with separate train vs test resolution `test_input_size`
1212

13-
### Jan 30, 2012
13+
### Jan 30, 2021
1414
* Add initial "Normalization Free" NF-RegNet-B* and NF-ResNet model definitions based on [paper](https://arxiv.org/abs/2101.08692)
1515

1616
### Jan 25, 2021

timm/models/layers/activations_me.py

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -30,6 +30,9 @@ class SwishJitAutoFn(torch.autograd.Function):
3030
Inspired by conversation between Jeremy Howard & Adam Paszke
3131
https://twitter.com/jeremyphoward/status/1188251041835315200
3232
"""
33+
@staticmethod
34+
def symbolic(g, x):
35+
return g.op("Mul", x, g.op("Sigmoid", x))
3336

3437
@staticmethod
3538
def forward(ctx, x):
@@ -152,6 +155,13 @@ def backward(ctx, grad_output):
152155
x = ctx.saved_tensors[0]
153156
return hard_swish_jit_bwd(x, grad_output)
154157

158+
@staticmethod
159+
def symbolic(g, self):
160+
input = g.op("Add", self, g.op('Constant', value_t=torch.tensor(3, dtype=torch.float)))
161+
hardtanh_ = g.op("Clip", input, g.op('Constant', value_t=torch.tensor(0, dtype=torch.float)), g.op('Constant', value_t=torch.tensor(6, dtype=torch.float)))
162+
hardtanh_ = g.op("Div", hardtanh_, g.op('Constant', value_t=torch.tensor(6, dtype=torch.float)))
163+
return g.op("Mul", self, hardtanh_)
164+
155165

156166
def hard_swish_me(x, inplace=False):
    """Apply hard-swish via the memory-efficient autograd Function.

    The `inplace` flag is accepted for interface compatibility with the
    other activation factories but is not honored by the underlying
    autograd implementation.
    """
    return HardSwishJitAutoFn.apply(x)

0 commit comments

Comments
 (0)