
[Feature] Support HyperBand and BOHB scheduler #101

Merged 36 commits on Dec 19, 2022
Commits (36)
a234607
Bump ray from 1.9.1 to 2.1.0
KKIEEK Nov 30, 2022
f6e85e4
Init
Dec 1, 2022
cac34ea
Update mmseg config
Dec 1, 2022
756147b
Fix deprecated warning
Dec 1, 2022
73aa245
Fix trainable function signature
Dec 1, 2022
f8fa7b2
Fix rewriter
Dec 1, 2022
5d3ac5b
Fix minor
Dec 1, 2022
04a5250
Fix reporter
Dec 2, 2022
59a86da
Fix apis
Dec 2, 2022
4fb42dd
Fix RayCheckpointHook
Dec 2, 2022
2c1215c
Fix requirements
Dec 2, 2022
709bb9c
Fix test code for rewriters
Dec 2, 2022
940320b
Fix test code for hooks
Dec 2, 2022
cddfc3c
Fix test code for tasks
Dec 2, 2022
b47f3c0
Fix test code for apis
Dec 2, 2022
7fd3e67
Merge branch 'main' into ray/v2.1.0
KKIEEK Dec 14, 2022
ca42bfc
:memo: Del checkpoint for base task proc
yhna940 Dec 15, 2022
411f307
Update siatune/apis/analysis.py
KKIEEK Dec 15, 2022
791111d
Update siatune/mm/tasks/mmtrainbase.py
KKIEEK Dec 15, 2022
703d5a1
Update siatune/mm/tasks/mmtrainbase.py
KKIEEK Dec 15, 2022
9eda02d
Support custom trainer and backend (#91)
KKIEEK Dec 15, 2022
082ea7b
Update siatune/mm/tasks/mmtrainbase.py
KKIEEK Dec 15, 2022
2cbe000
Merge branch 'main' into ray/v2.1.0
KKIEEK Dec 15, 2022
789ca62
Upgrade MMTask (#97)
KKIEEK Dec 16, 2022
d2ff007
Fix minor (#100)
KKIEEK Dec 16, 2022
e63911d
Update siatune/mm/tasks/mmtrainbase.py
KKIEEK Dec 16, 2022
c907be8
Support HyperBandScheduler
Dec 16, 2022
e56c8f8
Support BOHB
Dec 16, 2022
6943dcb
Refactor builder for searcher
Dec 16, 2022
33c75c7
Fix typo
Dec 16, 2022
8cf5c66
Fix test code for searcher
Dec 16, 2022
7c1cf9a
Fix
Dec 16, 2022
c6d7ef3
Update README.md
Dec 16, 2022
1f917fc
Update builder for searcher
KKIEEK Dec 16, 2022
08417e3
Merge
Dec 19, 2022
648df7a
Merge branch 'main' into v2.1.0/scheduler-cfg
KKIEEK Dec 19, 2022
9 changes: 5 additions & 4 deletions README.md
@@ -28,11 +28,12 @@ SIATune is an open-source deep learning model hyperparameter tuning toolbox espe…
- **Schedule multiple experiments**

Various scheduling techniques are supported to efficiently manage many experiments.
- [x] [AsyncHyperBandScheduler](https://arxiv.org/abs/1810.05934)
- [ ] [PopulationBasedTraining](https://www.deepmind.com/blog/population-based-training-of-neural-networks)
- [ ] [MedianStoppingRule](https://research.google.com/pubs/pub46180.html)
- [x] [Asynchronous HyperBand](https://arxiv.org/abs/1810.05934)
- [x] [HyperBand](https://arxiv.org/abs/1603.06560)
- [ ] [Median Stopping Rule](https://research.google.com/pubs/pub46180.html)
- [ ] [Population Based Training](https://www.deepmind.com/blog/population-based-training-of-neural-networks)
- [ ] [Population Based Bandits](https://arxiv.org/abs/2002.02518)
- [ ] [HyperBandScheduler](https://arxiv.org/abs/1603.06560)
- [x] [Bayesian Optimization and HyperBand](https://arxiv.org/abs/1807.01774)


- **Distributed tuning system based on Ray**
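For reference, the newly checked entries map onto Ray Tune's built-in trial schedulers. A minimal sketch of instantiating them directly against the Ray 2.x API (the argument values are illustrative, not SIATune defaults):

from ray.tune.schedulers import (AsyncHyperBandScheduler, HyperBandForBOHB,
                                 HyperBandScheduler)

# Each scheduler allocates trial budget along `training_iteration`, capped at `max_t`.
asha = AsyncHyperBandScheduler(time_attr='training_iteration', max_t=100)
hyperband = HyperBandScheduler(time_attr='training_iteration', max_t=100)
bohb = HyperBandForBOHB(time_attr='training_iteration', max_t=100)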
2 changes: 2 additions & 0 deletions configs/_base_/scheduler/bohb.py
@@ -0,0 +1,2 @@
trial_scheduler = dict(
type='HyperBandForBOHB', time_attr='training_iteration', max_t=20)
2 changes: 2 additions & 0 deletions configs/_base_/scheduler/hb.py
@@ -0,0 +1,2 @@
trial_scheduler = dict(
type='HyperBandScheduler', time_attr='training_iteration', max_t=20)
1 change: 1 addition & 0 deletions configs/_base_/searcher/bohb.py
@@ -0,0 +1 @@
searcher = dict(type='TuneBOHB')
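The two BOHB presets are meant to be used together: TuneBOHB proposes configurations and HyperBandForBOHB schedules and prunes them. Since the repository's configs follow the mmcv `_base_` convention, a downstream tuning config could compose them like this (the file path and the `max_t` override are hypothetical):

# configs/my_experiment/tune_bohb.py -- hypothetical downstream config
_base_ = [
    '../_base_/scheduler/bohb.py',
    '../_base_/searcher/bohb.py',
]
# Raise the budget cap; `max_t` bounds `training_iteration` in the longest bracket.
trial_scheduler = dict(
    type='HyperBandForBOHB', time_attr='training_iteration', max_t=50)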
5 changes: 1 addition & 4 deletions configs/_base_/searcher/nevergrad_oneplusone.py
@@ -1,4 +1 @@
searcher = dict(
type='NevergradSearch',
budget=256,
)
searcher = dict(type='NevergradSearch', budget=256)
6 changes: 1 addition & 5 deletions configs/_base_/searcher/nevergrad_pso.py
@@ -1,5 +1 @@
searcher = dict(
type='NevergradSearch',
optimizer='PSO',
budget=256,
)
searcher = dict(type='NevergradSearch', optimizer='PSO', budget=256)
2 changes: 2 additions & 0 deletions requirements/optional.txt
@@ -1,5 +1,7 @@
bayesian-optimization==1.2.0
ConfigSpace
flaml==1.0.14
hpbandster
hyperopt==0.2.5
mlflow==1.23.1
nevergrad==0.4.3.post7
4 changes: 2 additions & 2 deletions siatune/ray/schedulers/__init__.py
@@ -1,5 +1,5 @@
# Copyright (c) SI-Analytics. All rights reserved.
from .builder import SCHEDULERS, build_scheduler
from .builder import TRIAL_SCHEDULERS, build_scheduler
from .pbt import PopulationBasedTraining

__all__ = ['SCHEDULERS', 'build_scheduler', 'PopulationBasedTraining']
__all__ = ['TRIAL_SCHEDULERS', 'build_scheduler', 'PopulationBasedTraining']
9 changes: 5 additions & 4 deletions siatune/ray/schedulers/builder.py
@@ -3,15 +3,16 @@

from mmcv.utils import Config, Registry
from ray import tune
from ray.tune.schedulers import TrialScheduler

SCHEDULERS = Registry('schedulers')
TRIAL_SCHEDULERS = Registry('trial scheduler')
for v in set(tune.schedulers.SCHEDULER_IMPORT.values()):
if not inspect.isclass(v):
continue
SCHEDULERS.register_module(module=v)
TRIAL_SCHEDULERS.register_module(module=v)


def build_scheduler(cfg: Config) -> tune.schedulers.TrialScheduler:
def build_scheduler(cfg: Config) -> TrialScheduler:
"""Build the scheduler from configs.

Args:
@@ -20,4 +21,4 @@ def build_scheduler(cfg: Config) -> tune.schedulers.TrialScheduler:
tune.schedulers.TrialScheduler: The scheduler.
"""

return SCHEDULERS.build(cfg)
return TRIAL_SCHEDULERS.build(cfg)
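A hedged usage sketch of the renamed registry: because every class in Ray's `tune.schedulers.SCHEDULER_IMPORT` is auto-registered above, the `type` key is simply the Ray class name (a plain dict works here, since mmcv's `Registry.build` accepts one):

from siatune.ray.schedulers import build_scheduler

# Mirrors configs/_base_/scheduler/hb.py; yields a ray.tune.schedulers.HyperBandScheduler.
scheduler = build_scheduler(
    dict(type='HyperBandScheduler', time_attr='training_iteration', max_t=20))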
4 changes: 2 additions & 2 deletions siatune/ray/schedulers/pbt.py
@@ -8,7 +8,7 @@
PopulationBasedTraining as _PopulationBasedTraining
from ray.tune.search.sample import Domain

from siatune.ray.schedulers import SCHEDULERS
from siatune.ray.schedulers import TRIAL_SCHEDULERS
from siatune.ray.spaces import build_space
from siatune.utils import ImmutableContainer

@@ -50,7 +50,7 @@ def explore(
return new_config


@SCHEDULERS.register_module(force=True)
@TRIAL_SCHEDULERS.register_module(force=True)
class PopulationBasedTraining(_PopulationBasedTraining):

def __init__(self, *args, **kwargs) -> None:
7 changes: 1 addition & 6 deletions siatune/ray/searchers/__init__.py
@@ -1,10 +1,5 @@
# Copyright (c) SI-Analytics. All rights reserved.
from .builder import SEARCHERS, build_searcher
from .flaml import BlendSearch, CFOSearch
from .hyperopt import HyperOptSearch
from .nevergrad import NevergradSearch

__all__ = [
'SEARCHERS', 'build_searcher', 'BlendSearch', 'CFOSearch',
'HyperOptSearch', 'NevergradSearch'
]
__all__ = ['SEARCHERS', 'build_searcher', 'NevergradSearch']
4 changes: 4 additions & 0 deletions siatune/ray/searchers/builder.py
@@ -1,8 +1,12 @@
# Copyright (c) SI-Analytics. All rights reserved.

from mmcv.utils import Config, Registry
from ray import tune
from ray.tune.search import Searcher

SEARCHERS = Registry('searchers')
for func in set(tune.search.SEARCH_ALG_IMPORT.values()):
SEARCHERS.register_module(module=func())


def build_searcher(cfg: Config) -> Searcher:
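This auto-registration from Ray's `SEARCH_ALG_IMPORT` table is what makes the dedicated flaml/hyperopt wrapper modules below deletable. A hedged sketch mirroring the updated tests (each backend still needs its optional dependency from requirements/optional.txt, e.g. ConfigSpace and hpbandster for TuneBOHB):

from siatune.ray.searchers import build_searcher

bohb_search = build_searcher(dict(type='TuneBOHB'))
hyperopt_search = build_searcher(dict(type='HyperOptSearch'))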
15 changes: 0 additions & 15 deletions siatune/ray/searchers/flaml.py

This file was deleted.

9 changes: 0 additions & 9 deletions siatune/ray/searchers/hyperopt.py

This file was deleted.

6 changes: 3 additions & 3 deletions siatune/ray/searchers/nevergrad.py
@@ -21,7 +21,7 @@
optimizer_registry = dict()


@SEARCHERS.register_module()
@SEARCHERS.register_module(force=True)
class NevergradSearch(_NevergradSearch):
"""Search with Nevergrad."""

@@ -104,8 +104,8 @@ def _setup_nevergrad(self) -> None:
if len(self._nevergrad_opt.instrumentation.args) != 1:
raise ValueError(
'Instrumented optimizers should use kwargs only')
if self._parameters is not None and \
self._nevergrad_opt.dimension != len(self._parameters):
if self._parameters is not None and (self._nevergrad_opt.dimension !=
len(self._parameters)):
raise ValueError('len(parameters_names) must match optimizer '
'dimension for non-instrumented optimizers')

4 changes: 2 additions & 2 deletions tests/test_ray/test_schedulers.py
@@ -1,9 +1,9 @@
from siatune.ray.schedulers import SCHEDULERS, build_scheduler
from siatune.ray.schedulers import TRIAL_SCHEDULERS, build_scheduler


def test_build_schedulers():

@SCHEDULERS.register_module()
@TRIAL_SCHEDULERS.register_module()
class TestScheduler:
pass

31 changes: 24 additions & 7 deletions tests/test_ray/test_searchers.py
@@ -1,9 +1,7 @@
import pytest
from ray import tune

from siatune.ray.searchers import (SEARCHERS, BlendSearch, CFOSearch,
HyperOptSearch, NevergradSearch,
build_searcher)
from siatune.ray.searchers import SEARCHERS, build_searcher


def test_build_searcher():
@@ -39,7 +37,17 @@ def test_blend(trainable, config):
trainable,
metric='mean_loss',
mode='min',
search_alg=BlendSearch(),
search_alg=build_searcher(dict(type='BlendSearch')),
num_samples=2,
config=config)


def test_bohb(trainable, config):
tune.run(
trainable,
metric='mean_loss',
mode='min',
search_alg=build_searcher(dict(type='TuneBOHB')),
num_samples=2,
config=config)

@@ -49,7 +57,7 @@ def test_cfo(trainable, config):
trainable,
metric='mean_loss',
mode='min',
search_alg=CFOSearch(),
search_alg=build_searcher(dict(type='CFO')),
num_samples=2,
config=config)

@@ -59,7 +67,7 @@ def test_hyperopt(trainable, config):
trainable,
metric='mean_loss',
mode='min',
search_alg=HyperOptSearch(),
search_alg=build_searcher(dict(type='HyperOptSearch')),
num_samples=2,
config=config)

@@ -69,6 +77,15 @@ def test_nevergrad(trainable, config):
trainable,
metric='mean_loss',
mode='min',
search_alg=NevergradSearch(optimizer='PSO', budget=2),
search_alg=build_searcher(dict(type='NevergradSearch', budget=1)),
num_samples=2,
config=config)

tune.run(
trainable,
metric='mean_loss',
mode='min',
search_alg=build_searcher(
dict(type='NevergradSearch', optimizer='PSO', budget=1)),
num_samples=2,
config=config)
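Putting the two halves of this PR together: the TuneBOHB searcher only makes sense paired with the HyperBandForBOHB trial scheduler, which coordinates its brackets with the searcher's budget bookkeeping. A minimal end-to-end sketch against the raw Ray 2.x API (the toy trainable and numbers are illustrative; SIATune users would go through the config files above instead):

from ray import tune
from ray.tune.schedulers import HyperBandForBOHB
from ray.tune.search.bohb import TuneBOHB

def trainable(config):
    # Toy objective: loss shrinks with training_iteration, minimized near lr=0.1.
    for step in range(20):
        tune.report(mean_loss=(config['lr'] - 0.1) ** 2 / (step + 1))

tune.run(
    trainable,
    metric='mean_loss',
    mode='min',
    search_alg=TuneBOHB(),
    scheduler=HyperBandForBOHB(time_attr='training_iteration', max_t=20),
    num_samples=4,
    config={'lr': tune.uniform(0.001, 0.3)})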