This repository has been archived by the owner on Sep 18, 2024. It is now read-only.

support trial config deduplication in SMAC tuner #1840

Merged · 4 commits · Dec 23, 2019
1 change: 1 addition & 0 deletions docs/en_US/Tuner/BuiltinTuner.md
@@ -160,6 +160,7 @@ Similar to TPE, SMAC is also a black-box tuner which can be tried in various scenarios
**Requirement of classArgs**

* **optimize_mode** (*maximize or minimize, optional, default = maximize*) - If 'maximize', the tuner will try to maximize metrics. If 'minimize', the tuner will try to minimize metrics.
+* **config_dedup** (*True or False, optional, default = False*) - If True, the tuner will not generate a configuration that has already been generated. If False, a configuration may be generated twice, though this is rare for a relatively large search space.

**Usage example**

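For context, here is a minimal sketch (not part of this diff) of how the new flag reaches the tuner when it is constructed directly; the import path follows the `src/sdk/pynni` layout in this PR and assumes the NNI SDK and its SMAC dependencies are installed:

```python
# Minimal sketch, not from the PR: constructing the tuner directly with the
# new flag, assuming the package layout used in this diff
# (src/sdk/pynni/nni/smac_tuner/smac_tuner.py).
from nni.smac_tuner.smac_tuner import SMACTuner

tuner = SMACTuner(optimize_mode='maximize', config_dedup=True)
```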
31 changes: 27 additions & 4 deletions src/sdk/pynni/nni/smac_tuner/smac_tuner.py
@@ -18,26 +18,31 @@

from ConfigSpaceNNI import Configuration

+import nni
from nni.tuner import Tuner
from nni.utils import OptimizeMode, extract_scalar_reward

from .convert_ss_to_scenario import generate_scenario

+logger = logging.getLogger('smac_AutoML')

class SMACTuner(Tuner):
    """
    This is a wrapper of [SMAC](https://github.com/automl/SMAC3) following the NNI tuner interface.
    It only supports ``SMAC`` mode, and does not support multiple instances of SMAC3 (i.e.,
    running the same configuration multiple times).
    """
-    def __init__(self, optimize_mode="maximize"):
+    def __init__(self, optimize_mode="maximize", config_dedup=False):
        """
        Parameters
        ----------
        optimize_mode : str
            Optimize mode, 'maximize' or 'minimize', by default 'maximize'
+        config_dedup : bool
+            If True, the tuner will not generate a configuration that has already been generated.
+            If False, a configuration may be generated twice, though this is rare for a relatively large search space.
        """
-        self.logger = logging.getLogger(
-            self.__module__ + "." + self.__class__.__name__)
+        self.logger = logger
        self.optimize_mode = OptimizeMode(optimize_mode)
        self.total_data = {}
        self.optimizer = None
@@ -47,6 +52,7 @@ def __init__(self, optimize_mode="maximize"):
        self.loguniform_key = set()
        self.categorical_dict = {}
        self.cs = None
+        self.dedup = config_dedup

    def _main_cli(self):
        """
@@ -127,7 +133,7 @@ def update_search_space(self, search_space):
        search_space : dict
            The format is described in the search space spec (https://nni.readthedocs.io/en/latest/Tutorial/SearchSpaceSpec.html).
        """
-
+        self.logger.info('update search space in SMAC.')
        if not self.update_ss_done:
            self.categorical_dict = generate_scenario(search_space)
            if self.categorical_dict is None:
@@ -225,9 +231,19 @@ def generate_parameters(self, parameter_id, **kwargs):
            return self.param_postprocess(init_challenger.get_dictionary())
        else:
            challengers = self.smbo_solver.nni_smac_request_challengers()
+            challengers_empty = True
            for challenger in challengers:
+                challengers_empty = False
+                if self.dedup:
+                    match = [v for k, v in self.total_data.items()
+                             if v.get_dictionary() == challenger.get_dictionary()]
+                    if match:
+                        continue
                self.total_data[parameter_id] = challenger
                return self.param_postprocess(challenger.get_dictionary())
+            assert challengers_empty is False, 'The case that challengers is empty is not handled.'
+            self.logger.info('In generate_parameters: No more new parameters.')
+            raise nni.NoMoreTrialError('No more new parameters.')

    def generate_multiple_parameters(self, parameter_id_list, **kwargs):
        """
@@ -261,9 +277,16 @@ def generate_multiple_parameters(self, parameter_id_list, **kwargs):
            for challenger in challengers:
                if cnt >= len(parameter_id_list):
                    break
+                if self.dedup:
+                    match = [v for k, v in self.total_data.items()
+                             if v.get_dictionary() == challenger.get_dictionary()]
+                    if match:
+                        continue
                self.total_data[parameter_id_list[cnt]] = challenger
                params.append(self.param_postprocess(challenger.get_dictionary()))
                cnt += 1
+        if self.dedup and not params:
+            self.logger.info('In generate_multiple_parameters: No more new parameters.')
        return params

    def import_data(self, data):
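
The dedup check added above is a linear scan over every configuration handed out so far. Below is a self-contained sketch of the same idea, with plain dicts standing in for ConfigSpaceNNI `Configuration` objects (whose `get_dictionary()` returns a dict in the real code):

```python
# Self-contained sketch of the dedup logic; plain dicts stand in for
# Configuration objects.
def is_duplicate(total_data, candidate):
    """Return True if an identical configuration was already generated."""
    return any(cfg == candidate for cfg in total_data.values())

total_data = {0: {'lr': 0.1, 'batch_size': 32}}
print(is_duplicate(total_data, {'lr': 0.1, 'batch_size': 32}))   # True -> skip challenger
print(is_duplicate(total_data, {'lr': 0.01, 'batch_size': 32}))  # False -> hand it out
```

The scan is O(n) per challenger, which is cheap at typical trial counts; that is presumably why the PR keeps a plain dict scan rather than maintaining a separate hash set.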
13 changes: 11 additions & 2 deletions tools/nni_cmd/config_schema.py
@@ -53,14 +53,23 @@ def setPathCheck(key):
    }
}
tuner_schema_dict = {
-    ('Anneal', 'SMAC'): {
-        'builtinTunerName': setChoice('builtinTunerName', 'Anneal', 'SMAC'),
+    'Anneal': {
+        'builtinTunerName': 'Anneal',
        Optional('classArgs'): {
            'optimize_mode': setChoice('optimize_mode', 'maximize', 'minimize'),
        },
        Optional('includeIntermediateResults'): setType('includeIntermediateResults', bool),
        Optional('gpuIndices'): Or(int, And(str, lambda x: len([int(i) for i in x.split(',')]) > 0), error='gpuIndex format error!'),
    },
+    'SMAC': {
+        'builtinTunerName': 'SMAC',
+        Optional('classArgs'): {
+            'optimize_mode': setChoice('optimize_mode', 'maximize', 'minimize'),
+            'config_dedup': setType('config_dedup', bool)
+        },
+        Optional('includeIntermediateResults'): setType('includeIntermediateResults', bool),
+        Optional('gpuIndices'): Or(int, And(str, lambda x: len([int(i) for i in x.split(',')]) > 0), error='gpuIndex format error!'),
+    },
    ('Evolution'): {
        'builtinTunerName': setChoice('builtinTunerName', 'Evolution'),
        Optional('classArgs'): {
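
For illustration, a hedged example of a tuner section (the Python dict that an experiment YAML parses into) that should validate against the new 'SMAC' entry above; `config_dedup` is optional and defaults to False in the tuner:

```python
# Example tuner section, as parsed from experiment YAML, that should
# satisfy the new 'SMAC' schema entry; 'config_dedup' is optional.
tuner_section = {
    'builtinTunerName': 'SMAC',
    'classArgs': {
        'optimize_mode': 'maximize',
        'config_dedup': True,
    },
}
```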