Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

[Feature] Add SegmindLoggerHook #1650

Merged
merged 17 commits into from
Mar 3, 2022
Merged
Show file tree
Hide file tree
Changes from 4 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 5 additions & 3 deletions mmcv/runner/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,9 @@
Fp16OptimizerHook, GradientCumulativeFp16OptimizerHook,
GradientCumulativeOptimizerHook, Hook, IterTimerHook,
LoggerHook, MlflowLoggerHook, NeptuneLoggerHook,
OptimizerHook, PaviLoggerHook, SyncBuffersHook,
TensorboardLoggerHook, TextLoggerHook, WandbLoggerHook)
OptimizerHook, PaviLoggerHook, SegmindLoggerHook,
SyncBuffersHook, TensorboardLoggerHook, TextLoggerHook,
WandbLoggerHook)
from .hooks.lr_updater import StepLrUpdaterHook # noqa
from .hooks.lr_updater import (CosineAnnealingLrUpdaterHook,
CosineRestartLrUpdaterHook, CyclicLrUpdaterHook,
Expand Down Expand Up @@ -60,5 +61,6 @@
'allreduce_params', 'LossScaler', 'CheckpointLoader', 'BaseModule',
'_load_checkpoint_with_prefix', 'EvalHook', 'DistEvalHook', 'Sequential',
'ModuleDict', 'ModuleList', 'GradientCumulativeOptimizerHook',
'GradientCumulativeFp16OptimizerHook', 'DefaultRunnerConstructor'
'GradientCumulativeFp16OptimizerHook', 'DefaultRunnerConstructor',
'SegmindLoggerHook'
]
7 changes: 4 additions & 3 deletions mmcv/runner/hooks/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,8 @@
from .hook import HOOKS, Hook
from .iter_timer import IterTimerHook
from .logger import (DvcliveLoggerHook, LoggerHook, MlflowLoggerHook,
NeptuneLoggerHook, PaviLoggerHook, TensorboardLoggerHook,
TextLoggerHook, WandbLoggerHook)
NeptuneLoggerHook, PaviLoggerHook, SegmindLoggerHook,
TensorboardLoggerHook, TextLoggerHook, WandbLoggerHook)
from .lr_updater import (CosineAnnealingLrUpdaterHook,
CosineRestartLrUpdaterHook, CyclicLrUpdaterHook,
ExpLrUpdaterHook, FixedLrUpdaterHook,
Expand Down Expand Up @@ -38,5 +38,6 @@
'StepMomentumUpdaterHook', 'CosineAnnealingMomentumUpdaterHook',
'CyclicMomentumUpdaterHook', 'OneCycleMomentumUpdaterHook',
'SyncBuffersHook', 'EMAHook', 'EvalHook', 'DistEvalHook', 'ProfilerHook',
'GradientCumulativeOptimizerHook', 'GradientCumulativeFp16OptimizerHook'
'GradientCumulativeOptimizerHook', 'GradientCumulativeFp16OptimizerHook',
'SegmindLoggerHook'
]
3 changes: 2 additions & 1 deletion mmcv/runner/hooks/logger/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,12 +4,13 @@
from .mlflow import MlflowLoggerHook
from .neptune import NeptuneLoggerHook
from .pavi import PaviLoggerHook
from .segmind import SegmindLoggerHook
from .tensorboard import TensorboardLoggerHook
from .text import TextLoggerHook
from .wandb import WandbLoggerHook

__all__ = [
'LoggerHook', 'MlflowLoggerHook', 'PaviLoggerHook',
'TensorboardLoggerHook', 'TextLoggerHook', 'WandbLoggerHook',
'NeptuneLoggerHook', 'DvcliveLoggerHook'
'NeptuneLoggerHook', 'DvcliveLoggerHook', 'SegmindLoggerHook'
]
60 changes: 60 additions & 0 deletions mmcv/runner/hooks/logger/segmind.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
# Copyright (c) OpenMMLab. All rights reserved.
import numbers

from ...dist_utils import master_only
from ..hook import HOOKS
from .base import LoggerHook


@HOOKS.register_module()
class SegmindLoggerHook(LoggerHook):
    """Logger hook that pushes training metrics to Segmind.

    Requires the ``segmind`` package (``pip install segmind``).

    Args:
        init_kwargs (dict, optional): Currently unused; accepted for
            interface parity with the other logger hooks and stored for
            potential use by subclasses.
        interval (int): Logging interval (every k iterations). Default: 10.
        ignore_last (bool): Ignore the log of the last iterations in each
            epoch if fewer than ``interval`` remain. Default: True.
        reset_flag (bool): Whether to clear the output buffer after logging.
            Default: True.
    """

    def __init__(self,
                 init_kwargs=None,
                 interval=10,
                 ignore_last=True,
                 reset_flag=True):
        super(SegmindLoggerHook, self).__init__(interval, ignore_last,
                                                reset_flag)
        # Keep the kwargs instead of silently dropping them.
        self.init_kwargs = init_kwargs
        self.import_segmind()

    def import_segmind(self):
        """Import segmind and cache the callables used by :meth:`log`.

        Raises:
            ImportError: If the ``segmind`` package is not installed.
        """
        try:
            import segmind
            from segmind.tracking.fluent import log_metrics
            from segmind.utils.logging_utils import try_mlflow_log
        except ImportError:
            # Single-line message: the original used a backslash
            # continuation that embedded a run of spaces in the text.
            raise ImportError(
                "Please run 'pip install segmind' to install segmind")
        self.segmind = segmind
        self.log_metrics = log_metrics
        self.try_mlflow_log = try_mlflow_log

    @master_only
    def before_run(self, runner):
        super(SegmindLoggerHook, self).before_run(runner)
        # Defensive re-import in case a subclass cleared the handle.
        if self.segmind is None:
            self.import_segmind()

    @master_only
    def log(self, runner):
        """Send the numeric entries of the log buffer to Segmind."""
        metrics = {}
        for var, val in runner.log_buffer.output.items():
            # time/data_time are profiling values, not training metrics.
            if var in ['time', 'data_time']:
                continue
            tag = f'{var}_{runner.mode}'
            if isinstance(val, numbers.Number):
                metrics[tag] = val

        metrics['learning_rate'] = runner.current_lr()[0]
        metrics['momentum'] = runner.current_momentum()[0]

        # logging metrics to segmind
        self.try_mlflow_log(
            self.log_metrics, metrics, step=runner.epoch, epoch=runner.epoch)

    @master_only
    def after_run(self, runner):
        # Segmind needs no explicit teardown.
        pass
saurbhc marked this conversation as resolved.
Show resolved Hide resolved
22 changes: 21 additions & 1 deletion tests/test_runner/test_hooks.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,8 @@
GradientCumulativeFp16OptimizerHook,
GradientCumulativeOptimizerHook, IterTimerHook,
MlflowLoggerHook, NeptuneLoggerHook, OptimizerHook,
PaviLoggerHook, WandbLoggerHook, build_runner)
PaviLoggerHook, SegmindLoggerHook, WandbLoggerHook,
build_runner)
from mmcv.runner.fp16_utils import auto_fp16
from mmcv.runner.hooks.hook import HOOKS, Hook
from mmcv.runner.hooks.lr_updater import (CosineRestartLrUpdaterHook,
Expand Down Expand Up @@ -1189,6 +1190,25 @@ def test_mlflow_hook(log_model):
assert not hook.mlflow_pytorch.log_model.called


def test_segmind_hook():
    """Smoke-test SegmindLoggerHook with a stubbed segmind package.

    The hook performs ``from segmind.tracking.fluent import log_metrics``
    and ``from segmind.utils.logging_utils import try_mlflow_log``, so the
    whole submodule chain must be stubbed — stubbing only ``segmind`` makes
    the from-imports fail in the import machinery.
    """
    for mod in ('segmind', 'segmind.tracking', 'segmind.tracking.fluent',
                'segmind.utils', 'segmind.utils.logging_utils'):
        sys.modules[mod] = MagicMock()
    runner = _build_demo_runner()
    hook = SegmindLoggerHook()
    loader = DataLoader(torch.ones((5, 2)))

    runner.register_hook(hook)
    runner.run([loader, loader], [('train', 1), ('val', 1)])
    shutil.rmtree(runner.work_dir)

    # The hook logs via try_mlflow_log(log_metrics, metrics, ...);
    # it never calls a wandb-style init/log/join API.
    assert hook.try_mlflow_log.called
    args, kwargs = hook.try_mlflow_log.call_args
    assert args[0] is hook.log_metrics
    assert kwargs['step'] == runner.epoch
    assert kwargs['epoch'] == runner.epoch
saurbhc marked this conversation as resolved.
Show resolved Hide resolved


def test_wandb_hook():
sys.modules['wandb'] = MagicMock()
runner = _build_demo_runner()
Expand Down