[Fix] mmdet with version >=2.24.1 #29

Merged: 1 commit, May 26, 2022
mmtune/mm/tasks/mmdet.py: 8 changes (6 additions, 2 deletions)
@@ -1,5 +1,6 @@
 import argparse
 import copy
+import os
 import time
 from os import path as osp
 from typing import Optional, Sequence
@@ -64,6 +65,8 @@ def parse_args(self, args: Sequence[str]) -> argparse.Namespace:
             action='store_true',
             help='enable automatically scaling LR.')
         args = parser.parse_args(args)
+        if 'LOCAL_RANK' not in os.environ:
+            os.environ['LOCAL_RANK'] = str(dist.get_rank())
         return args

     def build_model(self,
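
The `LOCAL_RANK` fallback added in `parse_args` covers processes started without a distributed launcher (such as `torchrun`) exporting the variable, mirroring the pattern in mmdet's own `tools/train.py`. Below is a minimal sketch of the same idea, assuming `dist` is `torch.distributed`; the `ensure_local_rank` helper name and the guard for an uninitialized process group are illustration only, not part of this PR.

```python
# Illustrative sketch only, not the code in the diff above. Assumptions:
# `dist` is torch.distributed, and `ensure_local_rank` is a hypothetical
# helper name; the PR performs this inline inside parse_args().
import os

import torch.distributed as dist


def ensure_local_rank() -> str:
    """Populate LOCAL_RANK when no launcher exported it."""
    if 'LOCAL_RANK' not in os.environ:
        # The PR calls dist.get_rank() directly; the is_initialized() guard
        # here is an extra safety net for single-process runs.
        rank = dist.get_rank() if dist.is_initialized() else 0
        os.environ['LOCAL_RANK'] = str(rank)
    return os.environ['LOCAL_RANK']
```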
@@ -95,7 +98,7 @@ def train_model(self,
     def run(self, *args, **kwargs):
         from mmdet import __version__
         from mmdet.apis import init_random_seed, set_random_seed
-        from mmdet.utils import (collect_env, get_root_logger,
+        from mmdet.utils import (collect_env, get_device, get_root_logger,
                                  setup_multi_processes)
         args = self.args

@@ -155,8 +158,9 @@ def run(self, *args, **kwargs):
         logger.info(f'Distributed training: {distributed}')
         logger.info(f'Config:\n{cfg.pretty_text}')

+        cfg.device = get_device()
         # set random seeds
-        seed = init_random_seed(args.seed)
+        seed = init_random_seed(args.seed, device=cfg.device)
         seed = seed + dist.get_rank() if args.diff_seed else seed
         logger.info(f'Set random seed to {seed}, '
                     f'deterministic: {args.deterministic}')
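
The remaining hunks track mmdet >= 2.24.1's training flow, which resolves the target device with `mmdet.utils.get_device()` and passes it to `init_random_seed`, presumably so the seed broadcast no longer assumes CUDA is available. A hedged sketch of that flow under those assumptions; the `seed_everything` wrapper name and its signature are invented for illustration.

```python
# Sketch of the seeding flow this PR adopts; `seed_everything` is a
# hypothetical wrapper, not an mmtune or mmdet function.
from mmdet.apis import init_random_seed, set_random_seed
from mmdet.utils import get_device  # available in mmdet >= 2.24.1


def seed_everything(cfg, seed=None, diff_seed=False, deterministic=False,
                    rank=0):
    cfg.device = get_device()  # e.g. 'cuda' or 'cpu', depending on the host
    seed = init_random_seed(seed, device=cfg.device)
    # Optionally offset the seed per rank, as run() does when args.diff_seed
    # is set.
    seed = seed + rank if diff_seed else seed
    set_random_seed(seed, deterministic=deterministic)
    cfg.seed = seed
    return seed
```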