Protect against dangerous default value. (#268)
* Protect against dangerous default value.

* Added comments.
yaochaorui authored and wusize committed Nov 13, 2020
1 parent eab4515 commit 8950ce5
Showing 11 changed files with 64 additions and 0 deletions.
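The pattern fixed across these files is the classic Python pitfall: default argument values are evaluated once, when the function is defined, so a mutable default such as norm_cfg=dict(type='BN', requires_grad=True) is a single dict object shared by every call. If one module mutates that dict, every backbone constructed afterwards silently inherits the change. Each __init__ therefore deep-copies the incoming config before using it. The sketch below is illustrative only (Before/After are made-up names, not mmpose classes) and shows the failure mode and the guard:

import copy


class Before:
    """All instances share the one dict created at definition time."""

    def __init__(self, norm_cfg=dict(type='BN', requires_grad=True)):
        self.norm_cfg = norm_cfg


class After:
    """Deep-copying the argument gives every instance its own dict."""

    def __init__(self, norm_cfg=dict(type='BN', requires_grad=True)):
        # Protect mutable default arguments
        norm_cfg = copy.deepcopy(norm_cfg)
        self.norm_cfg = norm_cfg


a, b = Before(), Before()
a.norm_cfg['type'] = 'SyncBN'   # mutates the shared default dict ...
print(b.norm_cfg['type'])       # ... so b sees 'SyncBN' too

c, d = After(), After()
c.norm_cfg['type'] = 'SyncBN'   # only c's private copy changes
print(d.norm_cfg['type'])       # 'BN'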
4 changes: 4 additions & 0 deletions mmpose/models/backbones/cpm.py
@@ -1,3 +1,5 @@
import copy

import torch
import torch.nn as nn
from mmcv.cnn import ConvModule, constant_init, normal_init
@@ -75,6 +77,8 @@ def __init__(self,
middle_channels=32,
num_stages=6,
norm_cfg=dict(type='BN', requires_grad=True)):
# Protect mutable default arguments
norm_cfg = copy.deepcopy(norm_cfg)
super().__init__()

assert in_channels == 3
6 changes: 6 additions & 0 deletions mmpose/models/backbones/hourglass.py
@@ -1,3 +1,5 @@
import copy

import torch.nn as nn
from mmcv.cnn import ConvModule, constant_init, normal_init
from torch.nn.modules.batchnorm import _BatchNorm
@@ -28,6 +30,8 @@ def __init__(self,
stage_channels,
stage_blocks,
norm_cfg=dict(type='BN', requires_grad=True)):
# Protect mutable default arguments
norm_cfg = copy.deepcopy(norm_cfg)
super().__init__()

self.depth = depth
@@ -119,6 +123,8 @@ def __init__(self,
stage_blocks=(2, 2, 2, 2, 2, 4),
feat_channel=256,
norm_cfg=dict(type='BN', requires_grad=True)):
# Protect mutable default arguments
norm_cfg = copy.deepcopy(norm_cfg)
super().__init__()

self.num_stacks = num_stacks
7 changes: 7 additions & 0 deletions mmpose/models/backbones/hrnet.py
@@ -1,3 +1,5 @@
import copy

import torch.nn as nn
from mmcv.cnn import (build_conv_layer, build_norm_layer, constant_init,
normal_init)
@@ -26,6 +28,9 @@ def __init__(self,
with_cp=False,
conv_cfg=None,
norm_cfg=dict(type='BN')):

# Protect mutable default arguments
norm_cfg = copy.deepcopy(norm_cfg)
super().__init__()
self._check_branches(num_branches, num_blocks, in_channels,
num_channels)
@@ -269,6 +274,8 @@ def __init__(self,
norm_eval=False,
with_cp=False,
zero_init_residual=False):
# Protect mutable default arguments
norm_cfg = copy.deepcopy(norm_cfg)
super().__init__()
self.extra = extra
self.conv_cfg = conv_cfg
7 changes: 7 additions & 0 deletions mmpose/models/backbones/mobilenet_v2.py
@@ -1,3 +1,4 @@
import copy
import logging

import torch.nn as nn
@@ -38,6 +39,9 @@ def __init__(self,
norm_cfg=dict(type='BN'),
act_cfg=dict(type='ReLU6'),
with_cp=False):
# Protect mutable default arguments
norm_cfg = copy.deepcopy(norm_cfg)
act_cfg = copy.deepcopy(act_cfg)
super().__init__()
self.stride = stride
assert stride in [1, 2], f'stride must in [1, 2]. ' \
@@ -131,6 +135,9 @@ def __init__(self,
act_cfg=dict(type='ReLU6'),
norm_eval=False,
with_cp=False):
# Protect mutable default arguments
norm_cfg = copy.deepcopy(norm_cfg)
act_cfg = copy.deepcopy(act_cfg)
super().__init__()
self.widen_factor = widen_factor
self.out_indices = out_indices
3 changes: 3 additions & 0 deletions mmpose/models/backbones/mobilenet_v3.py
@@ -1,3 +1,4 @@
import copy
import logging

import torch.nn as nn
@@ -70,6 +71,8 @@ def __init__(self,
frozen_stages=-1,
norm_eval=False,
with_cp=False):
# Protect mutable default arguments
norm_cfg = copy.deepcopy(norm_cfg)
super().__init__()
assert arch in self.arch_settings
for index in out_indices:
4 changes: 4 additions & 0 deletions mmpose/models/backbones/regnet.py
@@ -1,3 +1,5 @@
import copy

import numpy as np
import torch.nn as nn
from mmcv.cnn import build_conv_layer, build_norm_layer
@@ -98,6 +100,8 @@ def __init__(self,
norm_eval=False,
with_cp=False,
zero_init_residual=True):
# Protect mutable default arguments
norm_cfg = copy.deepcopy(norm_cfg)
super(ResNet, self).__init__()

# Generate RegNet parameters first
10 changes: 10 additions & 0 deletions mmpose/models/backbones/resnet.py
@@ -1,3 +1,5 @@
import copy

import torch.nn as nn
import torch.utils.checkpoint as cp
from mmcv.cnn import (ConvModule, build_conv_layer, build_norm_layer,
@@ -42,6 +44,8 @@ def __init__(self,
with_cp=False,
conv_cfg=None,
norm_cfg=dict(type='BN')):
# Protect mutable default arguments
norm_cfg = copy.deepcopy(norm_cfg)
super().__init__()
self.in_channels = in_channels
self.out_channels = out_channels
@@ -157,6 +161,8 @@ def __init__(self,
with_cp=False,
conv_cfg=None,
norm_cfg=dict(type='BN')):
# Protect mutable default arguments
norm_cfg = copy.deepcopy(norm_cfg)
super().__init__()
assert style in ['pytorch', 'caffe']

@@ -336,6 +342,8 @@ def __init__(self,
norm_cfg=dict(type='BN'),
downsample_first=True,
**kwargs):
# Protect mutable default arguments
norm_cfg = copy.deepcopy(norm_cfg)
self.block = block
self.expansion = get_expansion(block, expansion)

@@ -493,6 +501,8 @@ def __init__(self,
norm_eval=False,
with_cp=False,
zero_init_residual=True):
# Protect mutable default arguments
norm_cfg = copy.deepcopy(norm_cfg)
super().__init__()
if depth not in self.arch_settings:
raise KeyError(f'invalid depth {depth} for resnet')
4 changes: 4 additions & 0 deletions mmpose/models/backbones/scnet.py
@@ -1,3 +1,5 @@
import copy

import torch
import torch.nn as nn
import torch.nn.functional as F
@@ -29,6 +31,8 @@ def __init__(self,
pooling_r,
conv_cfg=None,
norm_cfg=dict(type='BN', momentum=0.1)):
# Protect mutable default arguments
norm_cfg = copy.deepcopy(norm_cfg)
super().__init__()

assert in_channels == out_channels
7 changes: 7 additions & 0 deletions mmpose/models/backbones/shufflenet_v1.py
@@ -1,3 +1,4 @@
import copy
import logging

import torch
@@ -52,6 +53,9 @@ def __init__(self,
norm_cfg=dict(type='BN'),
act_cfg=dict(type='ReLU'),
with_cp=False):
# Protect mutable default arguments
norm_cfg = copy.deepcopy(norm_cfg)
act_cfg = copy.deepcopy(act_cfg)
super().__init__()
self.in_channels = in_channels
self.out_channels = out_channels
@@ -184,6 +188,9 @@ def __init__(self,
act_cfg=dict(type='ReLU'),
norm_eval=False,
with_cp=False):
# Protect mutable default arguments
norm_cfg = copy.deepcopy(norm_cfg)
act_cfg = copy.deepcopy(act_cfg)
super().__init__()
self.stage_blocks = [4, 8, 4]
self.groups = groups
7 changes: 7 additions & 0 deletions mmpose/models/backbones/shufflenet_v2.py
@@ -1,3 +1,4 @@
import copy
import logging

import torch
@@ -36,6 +37,9 @@ def __init__(self,
norm_cfg=dict(type='BN'),
act_cfg=dict(type='ReLU'),
with_cp=False):
# Protect mutable default arguments
norm_cfg = copy.deepcopy(norm_cfg)
act_cfg = copy.deepcopy(act_cfg)
super().__init__()
self.stride = stride
self.with_cp = with_cp
@@ -159,6 +163,9 @@ def __init__(self,
act_cfg=dict(type='ReLU'),
norm_eval=False,
with_cp=False):
# Protect mutable default arguments
norm_cfg = copy.deepcopy(norm_cfg)
act_cfg = copy.deepcopy(act_cfg)
super().__init__()
self.stage_blocks = [4, 8, 4]
for index in out_indices:
5 changes: 5 additions & 0 deletions mmpose/models/backbones/utils/inverted_residual.py
@@ -1,3 +1,5 @@
import copy

import torch.nn as nn
import torch.utils.checkpoint as cp
from mmcv.cnn import ConvModule
@@ -45,6 +47,9 @@ def __init__(self,
norm_cfg=dict(type='BN'),
act_cfg=dict(type='ReLU'),
with_cp=False):
# Protect mutable default arguments
norm_cfg = copy.deepcopy(norm_cfg)
act_cfg = copy.deepcopy(act_cfg)
super().__init__()
self.with_res_shortcut = (stride == 1 and in_channels == out_channels)
assert stride in [1, 2]
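The commit uses copy.deepcopy rather than a shallow copy. That choice matters whenever a config nests further dicts (common with mmcv-style cfgs), because a shallow copy would still alias the inner dicts. A quick standalone check, not taken from the repository:

import copy

cfg = dict(norm_cfg=dict(type='BN', requires_grad=True))

shallow = copy.copy(cfg)
shallow['norm_cfg']['type'] = 'SyncBN'
print(cfg['norm_cfg']['type'])   # 'SyncBN' - the nested dict is still shared

deep = copy.deepcopy(cfg)
deep['norm_cfg']['type'] = 'GN'
print(cfg['norm_cfg']['type'])   # still 'SyncBN' - deepcopy broke the aliasing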
