From ec02d45bfa4dedd3ed539061c2e81a23e93dca65 Mon Sep 17 00:00:00 2001
From: Elizabeth Santorella
Date: Sat, 6 Apr 2024 16:16:49 -0700
Subject: [PATCH] make qNIPV not an `AnalyticAcquisitionFunction`; clarify
 `optimize_acqf` support

Summary:
I passed a `LogExpectedImprovement` instance to `optimize_acqf`, and when I
got an error about it not having an attribute `X_pending`, I could not tell
whether this was a bug or a usage that is known to be unsupported.

- Make `qNegIntegratedPosteriorVariance` inherit from `AcquisitionFunction`
  rather than `AnalyticAcquisitionFunction`, because none of the functionality
  it inherited from `AnalyticAcquisitionFunction` was relevant to it.
- `qNegIntegratedPosteriorVariance` loses an error message that said it does
  not support multi-output models with a `PosteriorTransform` that is not
  scalarized, and gains a unit test showing that this case is in fact
  supported.
- Pass `stacklevel=2` to the `warnings.warn` calls touched in
  `botorch/optim/optimize.py`, so that warnings are attributed closer to the
  calling code.

Differential Revision: D55843171
---
 botorch/acquisition/active_learning.py   |  7 +++--
 botorch/optim/optimize.py                |  4 ++-
 test/acquisition/test_active_learning.py | 40 ++++++++++++++----------
 test/acquisition/test_fixed_feature.py   |  1 +
 4 files changed, 31 insertions(+), 21 deletions(-)

diff --git a/botorch/acquisition/active_learning.py b/botorch/acquisition/active_learning.py
index 61c7717867..4125a88b11 100644
--- a/botorch/acquisition/active_learning.py
+++ b/botorch/acquisition/active_learning.py
@@ -27,7 +27,7 @@
 import torch
 from botorch import settings
-from botorch.acquisition.analytic import AnalyticAcquisitionFunction
+from botorch.acquisition.acquisition import AcquisitionFunction
 from botorch.acquisition.monte_carlo import MCAcquisitionFunction
 from botorch.acquisition.objective import MCAcquisitionObjective, PosteriorTransform
 from botorch.models.model import Model
@@ -37,7 +37,7 @@ from torch import Tensor
 
 
-class qNegIntegratedPosteriorVariance(AnalyticAcquisitionFunction):
+class qNegIntegratedPosteriorVariance(AcquisitionFunction):
     r"""Batch Integrated Negative Posterior Variance for Active Learning.
 
     This acquisition function quantifies the (negative) integrated posterior variance
@@ -75,7 +75,8 @@ def __init__(
                 points that have been submitted for function evaluation but have not yet
                 been evaluated.
         """
-        super().__init__(model=model, posterior_transform=posterior_transform)
+        super().__init__(model=model)
+        self.posterior_transform = posterior_transform
         if sampler is None:
             # If no sampler is provided, we use the following dummy sampler for the
             # fantasize() method in forward. IMPORTANT: This assumes that the posterior
diff --git a/botorch/optim/optimize.py b/botorch/optim/optimize.py
index dd1f9e22da..49de8bcd65 100644
--- a/botorch/optim/optimize.py
+++ b/botorch/optim/optimize.py
@@ -153,6 +153,7 @@ def _raise_deprecation_warning_if_kwargs(fn_name: str, kwargs: Dict[str, Any]) -
             f"`{fn_name}` does not support arguments {list(kwargs.keys())}. In "
             "the future, this will become an error.",
             DeprecationWarning,
+            stacklevel=2,
         )
 
 
@@ -366,7 +367,7 @@ def _optimize_batch_candidates() -> Tuple[Tensor, Tensor, List[Warning]]:
             f"warning(s):\n{[w.message for w in ws]}\nTrying again with a new "
             "set of initial conditions."
         )
-        warnings.warn(first_warn_msg, RuntimeWarning)
+        warnings.warn(first_warn_msg, RuntimeWarning, stacklevel=2)
 
         if not initial_conditions_provided:
             batch_initial_conditions = opt_inputs.get_ic_generator()(
@@ -392,6 +393,7 @@ def _optimize_batch_candidates() -> Tuple[Tensor, Tensor, List[Warning]]:
                 "Optimization failed on the second try, after generating a "
                 "new set of initial conditions.",
                 RuntimeWarning,
+                stacklevel=2,
             )
 
     if opt_inputs.post_processing_func is not None:
diff --git a/test/acquisition/test_active_learning.py b/test/acquisition/test_active_learning.py
index f2b53455b4..9ed02b9d4a 100644
--- a/test/acquisition/test_active_learning.py
+++ b/test/acquisition/test_active_learning.py
@@ -77,6 +77,7 @@ def test_q_neg_int_post_variance(self):
             val = qNIPV(X)
             val_exp = -variance.mean(dim=-2).squeeze(-1)
             self.assertAllClose(val, val_exp, atol=1e-4)
+
             # multi-output model
             mean = torch.zeros(4, 2, device=self.device, dtype=dtype)
             variance = torch.rand(4, 2, device=self.device, dtype=dtype)
@@ -84,24 +85,29 @@
             f_posterior = GPyTorchPosterior(MultitaskMultivariateNormal(mean, cov))
             mc_points = torch.rand(10, 1, device=self.device, dtype=dtype)
             mfm = MockModel(f_posterior)
-            with mock.patch.object(MockModel, "fantasize", return_value=mfm):
-                with mock.patch(no, new_callable=mock.PropertyMock) as mock_num_outputs:
-                    mock_num_outputs.return_value = 2
-                    mm = MockModel(None)
+            with mock.patch.object(
+                MockModel, "fantasize", return_value=mfm
+            ), mock.patch(no, new_callable=mock.PropertyMock) as mock_num_outputs:
+                mock_num_outputs.return_value = 2
+                mm = MockModel(None)
+
+                weights = torch.tensor([0.5, 0.5], device=self.device, dtype=dtype)
+                qNIPV = qNegIntegratedPosteriorVariance(
+                    model=mm,
+                    mc_points=mc_points,
+                    posterior_transform=ScalarizedPosteriorTransform(weights=weights),
+                )
+                X = torch.empty(1, 1, device=self.device, dtype=dtype)  # dummy
+                val = qNIPV(X)
+                self.assertAllClose(val, -0.5 * variance.mean(), atol=1e-4)
+                # without posterior_transform
+                qNIPV = qNegIntegratedPosteriorVariance(
+                    model=mm,
+                    mc_points=mc_points,
+                )
+                val = qNIPV(X)
+                self.assertAllClose(val, -variance.mean(0), atol=1e-4)
 
-                    weights = torch.tensor([0.5, 0.5], device=self.device, dtype=dtype)
-                    qNIPV = qNegIntegratedPosteriorVariance(
-                        model=mm,
-                        mc_points=mc_points,
-                        posterior_transform=ScalarizedPosteriorTransform(
-                            weights=weights
-                        ),
-                    )
-                    X = torch.empty(1, 1, device=self.device, dtype=dtype)  # dummy
-                    val = qNIPV(X)
-                    self.assertTrue(
-                        torch.allclose(val, -0.5 * variance.mean(), atol=1e-4)
-                    )
             # batched multi-output model
             mean = torch.zeros(4, 3, 1, 2, device=self.device, dtype=dtype)
             variance = torch.rand(4, 3, 1, 2, device=self.device, dtype=dtype)
diff --git a/test/acquisition/test_fixed_feature.py b/test/acquisition/test_fixed_feature.py
index 1f5218d228..a7ec25a6fe 100644
--- a/test/acquisition/test_fixed_feature.py
+++ b/test/acquisition/test_fixed_feature.py
@@ -12,6 +12,7 @@
     get_dtype_of_sequence,
 )
 from botorch.acquisition.monte_carlo import qExpectedImprovement
+from botorch.exceptions import UnsupportedError
 from botorch.models import SingleTaskGP
 from botorch.utils.testing import BotorchTestCase, MockAcquisitionFunction
 
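
Note (illustrative, not part of the patch): a minimal sketch of the usage the
new unit test exercises, written against standard BoTorch APIs
(`SingleTaskGP`, `ScalarizedPosteriorTransform`). The model, data shapes,
weights, and variable names below are hypothetical placeholders chosen only to
show the call pattern that this change treats as supported.

    import torch

    from botorch.acquisition.active_learning import (
        qNegIntegratedPosteriorVariance,
    )
    from botorch.acquisition.objective import ScalarizedPosteriorTransform
    from botorch.models import SingleTaskGP

    # Hypothetical two-output model.
    train_X = torch.rand(8, 2, dtype=torch.float64)
    train_Y = torch.rand(8, 2, dtype=torch.float64)  # two outcomes
    model = SingleTaskGP(train_X, train_Y)

    # Points over which the posterior variance is integrated.
    mc_points = torch.rand(128, 2, dtype=torch.float64)

    # Multi-output model combined with a scalarizing posterior transform.
    acqf = qNegIntegratedPosteriorVariance(
        model=model,
        mc_points=mc_points,
        posterior_transform=ScalarizedPosteriorTransform(
            weights=torch.ones(2, dtype=torch.float64)
        ),
    )
    X = torch.rand(3, 1, 2, dtype=torch.float64)  # 3 t-batches of q=1 candidates
    vals = acqf(X)  # one negative integrated posterior variance per t-batch

    # Multi-output model without a posterior transform: per the commit
    # message, this fell under the now-removed error message; the new unit
    # test exercises it directly.
    acqf_no_transform = qNegIntegratedPosteriorVariance(
        model=model, mc_points=mc_points
    )
    vals_no_transform = acqf_no_transform(X)

The new test covers both of these paths, with and without a
`posterior_transform`, mirroring the sketch above.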