From 71bba9c04d902cf807c4f81c67b735692d1a4047 Mon Sep 17 00:00:00 2001
From: Louis Tiao
Date: Thu, 22 Aug 2024 20:01:25 -0700
Subject: [PATCH] Deprecate `no_bayesian_optimization` argument in favor of
 `force_random_search` (#2693)

Summary:
Pull Request resolved: https://github.com/facebook/Ax/pull/2693

This deprecates the `no_bayesian_optimization` argument in favor of
`force_random_search`. It is a "soft" deprecation: we continue to support
the `no_bayesian_optimization` argument, but we turn it into an *optional*
one and raise a deprecation warning whenever it is explicitly specified by
the user. If this soft deprecation does not start any fires in the coming
few weeks, we will move forward with a "hard" deprecation in follow-up task
T199632397.

- In the `GenerationStrategyConfig` dataclass, we turned
  `no_bayesian_optimization` into an
  [init-only variable](https://docs.python.org/3/library/dataclasses.html#init-only-variables),
  so it is no longer a "field".
- Everywhere it appears as an argument, `no_bayesian_optimization` is now an
  *optional* argument with a default value of `None`. Whenever it is not
  `None`, we raise a deprecation warning, and whenever its value conflicts
  with `force_random_search` (non-optional, default `False`), we raise a
  `ValueError`.

Differential Revision: D61601511
---
 ax/modelbridge/dispatch_utils.py | 16 +++++++++++-----
 ax/utils/common/deprecation.py   | 31 +++++++++++++++++++++++++++++++
 2 files changed, 42 insertions(+), 5 deletions(-)
 create mode 100644 ax/utils/common/deprecation.py

diff --git a/ax/modelbridge/dispatch_utils.py b/ax/modelbridge/dispatch_utils.py
index 633fa993df9..1254e557dbd 100644
--- a/ax/modelbridge/dispatch_utils.py
+++ b/ax/modelbridge/dispatch_utils.py
@@ -23,6 +23,7 @@
 from ax.models.torch.botorch_modular.model import BoTorchModel as ModularBoTorchModel
 from ax.models.types import TConfig
 from ax.models.winsorization_config import WinsorizationConfig
+from ax.utils.common.deprecation import _validate_force_random_search
 from ax.utils.common.logger import get_logger
 from ax.utils.common.typeutils import not_none
 
@@ -295,7 +296,8 @@ def choose_generation_strategy(
         Union[WinsorizationConfig, dict[str, WinsorizationConfig]]
     ] = None,
     derelativize_with_raw_status_quo: bool = False,
-    no_bayesian_optimization: bool = False,
+    no_bayesian_optimization: Optional[bool] = None,
+    force_random_search: bool = False,
     num_trials: Optional[int] = None,
     num_initialization_trials: Optional[int] = None,
     num_completed_initialization_trials: int = 0,
@@ -347,8 +349,9 @@
             Winsorization when relative constraints are present. Note: automatic
             Winsorization will fail if this is set to `False` (or unset) and there
             are relative constraints present.
-        no_bayesian_optimization: If True, Bayesian optimization generation
-            strategy will not be suggested and quasi-random strategy will be used.
+        no_bayesian_optimization: Deprecated. Use `force_random_search`.
+        force_random_search: If True, quasi-random generation strategy will be used
+            rather than Bayesian optimization.
         num_trials: Total number of trials in the optimization, if known in advance.
         num_initialization_trials: Specific number of initialization trials, if wanted.
@@ -441,7 +444,10 @@ def choose_generation_strategy(
         sobol_parallelism = None  # No restriction on Sobol phase
         bo_parallelism = DEFAULT_BAYESIAN_PARALLELISM
 
-    if not no_bayesian_optimization and suggested_model is not None:
+    # TODO[T199632397] Remove
+    _validate_force_random_search(no_bayesian_optimization, force_random_search)
+
+    if not force_random_search and suggested_model is not None:
         if not enforce_sequential_optimization and (
             max_parallelism_override or max_parallelism_cap
         ):
@@ -546,7 +552,7 @@ def choose_generation_strategy(
             f" {num_remaining_initialization_trials} will take longer to generate due"
             " to model-fitting."
         )
-    else:  # `no_bayesian_optimization` is True or we could not suggest BO model
+    else:  # `force_random_search` is True or we could not suggest BO model
         if verbose is not None:
             logger.warning(
                 f"Ignoring `verbose = {verbose}` for `generation_strategy` "
diff --git a/ax/utils/common/deprecation.py b/ax/utils/common/deprecation.py
new file mode 100644
index 00000000000..4e61d62849f
--- /dev/null
+++ b/ax/utils/common/deprecation.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python3
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+
+import warnings
+from typing import Optional, Type
+
+
+def _validate_force_random_search(
+    no_bayesian_optimization: Optional[bool] = None,
+    force_random_search: bool = False,
+    exception_cls: Type[Exception] = ValueError,
+) -> None:
+    if no_bayesian_optimization is not None:
+        # users are effectively permitted to continue using
+        # `no_bayesian_optimization` so long as it doesn't
+        # conflict with `force_random_search`
+        if no_bayesian_optimization != force_random_search:
+            raise exception_cls(
+                "Conflicting values for `force_random_search` "
+                "and `no_bayesian_optimization`! "
+                "Please only specify `force_random_search`."
+            )
+        warnings.warn(
+            "`no_bayesian_optimization` is deprecated. Please use "
+            "`force_random_search` in the future.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
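
--
Reviewer notes (illustrative examples; not part of the patch):

Once this diff is applied, the deprecation behavior can be exercised directly
through the new helper. A minimal sketch, assuming only what the diff above
adds (`_validate_force_random_search` in the new `ax.utils.common.deprecation`
module):

    import warnings

    from ax.utils.common.deprecation import _validate_force_random_search

    # New-style call: no deprecated argument, so no warning and no error.
    _validate_force_random_search(force_random_search=True)

    # Deprecated argument that agrees with `force_random_search`: still
    # supported, but a DeprecationWarning is emitted.
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        _validate_force_random_search(
            no_bayesian_optimization=True, force_random_search=True
        )
    assert any(issubclass(w.category, DeprecationWarning) for w in caught)

    # Conflicting values raise the (default) `ValueError`.
    try:
        _validate_force_random_search(
            no_bayesian_optimization=True, force_random_search=False
        )
    except ValueError as err:
        print(err)

The `GenerationStrategyConfig` change described in the summary is not part of
this diff; the init-only-variable pattern it refers to looks roughly like the
following (a hypothetical sketch, not the actual Ax dataclass):

    from dataclasses import InitVar, dataclass
    from typing import Optional

    from ax.utils.common.deprecation import _validate_force_random_search


    @dataclass
    class GenerationStrategyConfig:
        force_random_search: bool = False
        # Init-only: accepted by `__init__`, but no longer a dataclass field.
        no_bayesian_optimization: InitVar[Optional[bool]] = None

        def __post_init__(self, no_bayesian_optimization: Optional[bool]) -> None:
            # Same warn-or-raise policy as `choose_generation_strategy` above.
            _validate_force_random_search(
                no_bayesian_optimization, self.force_random_search
            )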