Skip to content

Errors occur in LrUpdaterHook when multiple optimizers are introduced #887

@wuyuuu

Description

@wuyuuu

Hi,

I'm using mmpose with multiple optimizers and I got errors in LrUpdateHook

Traceback (most recent call last):
  File "/home2/wuy/.pycharm_helpers/pydev/pydevd.py", line 1477, in _exec
    pydev_imports.execfile(file, globals, locals)  # execute the script
  File "/home2/wuy/.pycharm_helpers/pydev/_pydev_imps/_pydev_execfile.py", line 18, in execfile
    exec(compile(contents+"\n", file, 'exec'), glob, loc)
  File "/home2/wuy/project/mmpose/tools/model_search.py", line 169, in <module>
    main()
  File "/home2/wuy/project/mmpose/tools/model_search.py", line 165, in main
    meta=meta)
  File "/home2/wuy/project/mmpose/mmpose/apis/model_search.py", line 151, in search_model
    runner.run(data_loaders, cfg.workflow, cfg.total_epochs)
  File "/home2/wuy/project/mmpose/mmpose/apis/my_epoch_runner.py", line 134, in run
    epoch_runner(data_loaders[i], **kwargs)
  File "/home2/wuy/project/mmpose/mmpose/apis/my_epoch_runner.py", line 51, in train
    self.call_hook('before_train_iter')
  File "/home2/wuy/anaconda3/lib/python3.7/site-packages/mmcv/runner/base_runner.py", line 308, in call_hook
    getattr(hook, fn_name)(self)
  File "/home2/wuy/anaconda3/lib/python3.7/site-packages/mmcv/runner/hooks/lr_updater.py", line 139, in before_train_iter
    warmup_lr = self.get_warmup_lr(cur_iter)
  File "/home2/wuy/anaconda3/lib/python3.7/site-packages/mmcv/runner/hooks/lr_updater.py", line 88, in get_warmup_lr
    warmup_lr = [_lr * (1 - k) for _lr in self.regular_lr]
  File "/home2/wuy/anaconda3/lib/python3.7/site-packages/mmcv/runner/hooks/lr_updater.py", line 88, in <listcomp>
    warmup_lr = [_lr * (1 - k) for _lr in self.regular_lr]
TypeError: can't multiply sequence by non-int of type 'float'

It seems to me it is because get_warmup_lr does not work for multiple optimizers as self.regular_lr is a list for single optimizer while it is a dict for multiple optimizers.

def get_warmup_lr(self, cur_iters):
if self.warmup == 'constant':
warmup_lr = [_lr * self.warmup_ratio for _lr in self.regular_lr]
elif self.warmup == 'linear':
k = (1 - cur_iters / self.warmup_iters) * (1 - self.warmup_ratio)
warmup_lr = [_lr * (1 - k) for _lr in self.regular_lr]
elif self.warmup == 'exp':
k = self.warmup_ratio**(1 - cur_iters / self.warmup_iters)
warmup_lr = [_lr * k for _lr in self.regular_lr]
return warmup_lr

def get_regular_lr(self, runner):
if isinstance(runner.optimizer, dict):
lr_groups = {}
for k in runner.optimizer.keys():
_lr_group = [
self.get_lr(runner, _base_lr)
for _base_lr in self.base_lr[k]
]
lr_groups.update({k: _lr_group})
return lr_groups
else:
return [self.get_lr(runner, _base_lr) for _base_lr in self.base_lr]

Metadata

Metadata

Assignees

Labels

No labels
No labels

Type

No type

Projects

No projects

Milestone

No milestone

Relationships

None yet

Development

No branches or pull requests

Issue actions