Enabling candidate generation with both non_linear_constraints and fixed_features #1912

Closed · wants to merge 16 commits
18 changes: 9 additions & 9 deletions botorch/generation/gen.py
@@ -114,15 +114,12 @@ def gen_candidates_scipy(
     # if there are fixed features we may optimize over a domain of lower dimension
     reduced_domain = False
     if fixed_features:
-        # TODO: We can support fixed features, see Max's comment on D33551393. We can
-        # consider adding this at a later point.
-        if nonlinear_inequality_constraints:
-            raise NotImplementedError(
-                "Fixed features are not supported when non-linear inequality "
-                "constraints are given."
-            )
-        # if there are no constraints things are straightforward
-        if not (inequality_constraints or equality_constraints):
+        # if there are no constraints, things are straightforward
+        if not (
+            inequality_constraints
+            or equality_constraints
+            or nonlinear_inequality_constraints
+        ):
             reduced_domain = True
         # if there are we need to make sure features are fixed to specific values
         else:
@@ -137,6 +134,7 @@ def gen_candidates_scipy(
             upper_bounds=upper_bounds,
             inequality_constraints=inequality_constraints,
             equality_constraints=equality_constraints,
+            nonlinear_inequality_constraints=nonlinear_inequality_constraints,
         )
         # call the routine with no fixed_features
         clamped_candidates, batch_acquisition = gen_candidates_scipy(
@@ -146,6 +144,7 @@ def gen_candidates_scipy(
             upper_bounds=_no_fixed_features.upper_bounds,
             inequality_constraints=_no_fixed_features.inequality_constraints,
             equality_constraints=_no_fixed_features.equality_constraints,
+            nonlinear_inequality_constraints=_no_fixed_features.nonlinear_inequality_constraints,  # noqa: E501
             options=options,
             fixed_features=None,
             timeout_sec=timeout_sec,
@@ -342,6 +341,7 @@ def gen_candidates_torch(
            upper_bounds=upper_bounds,
            inequality_constraints=None,
            equality_constraints=None,
+           nonlinear_inequality_constraints=None,
        )

        # call the routine with no fixed_features
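With the NotImplementedError removed, fixed features and nonlinear inequality constraints can be combined in a single gen_candidates_scipy call. A minimal sketch of the call pattern this enables (the toy acquisition callable and constraint below are hypothetical stand-ins, not part of the diff; real usage passes a fitted botorch AcquisitionFunction and feasible initial conditions):

import torch
from botorch.generation.gen import gen_candidates_scipy

def toy_acqf(X):
    # hypothetical stand-in for an AcquisitionFunction: maps b x q x d -> b
    return -(X - 1.0).pow(2).sum(dim=-1).squeeze(-1)

def nlc(x):
    # botorch convention: feasible iff nlc(x) >= 0, i.e. sum(x) <= 4 here
    return 4.0 - x.sum(dim=-1)

# the initial condition must satisfy the constraint and match the fixed value
ics = torch.tensor([[[1.0, 2.0, 0.5]]])

candidates, acq_val = gen_candidates_scipy(
    initial_conditions=ics,
    acquisition_function=toy_acqf,
    lower_bounds=torch.zeros(3),
    upper_bounds=4.0 * torch.ones(3),
    nonlinear_inequality_constraints=[nlc],
    fixed_features={1: 2.0},  # previously raised NotImplementedError
)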
15 changes: 13 additions & 2 deletions botorch/generation/utils.py
@@ -7,11 +7,14 @@
 from __future__ import annotations

 from dataclasses import dataclass
-from typing import Dict, List, Optional, Tuple, Union
+from typing import Callable, Dict, List, Optional, Tuple, Union

 import torch
 from botorch.acquisition import AcquisitionFunction, FixedFeatureAcquisitionFunction
-from botorch.optim.parameter_constraints import _generate_unfixed_lin_constraints
+from botorch.optim.parameter_constraints import (
+    _generate_unfixed_lin_constraints,
+    _generate_unfixed_nonlin_constraints,
+)
 from torch import Tensor

@@ -63,6 +66,7 @@ class _NoFixedFeatures:
     upper_bounds: Optional[Union[float, Tensor]]
     inequality_constraints: Optional[List[Tuple[Tensor, Tensor, float]]]
     equality_constraints: Optional[List[Tuple[Tensor, Tensor, float]]]
+    nonlinear_inequality_constraints: Optional[List[Callable]]


 def _remove_fixed_features_from_optimization(
@@ -73,6 +77,7 @@ def _remove_fixed_features_from_optimization(
     upper_bounds: Optional[Union[float, Tensor]],
     inequality_constraints: Optional[List[Tuple[Tensor, Tensor, float]]],
     equality_constraints: Optional[List[Tuple[Tensor, Tensor, float]]],
+    nonlinear_inequality_constraints: Optional[List[Callable]],
 ) -> _NoFixedFeatures:
     """
     Given a set of non-empty fixed features, this function effectively reduces the
@@ -140,11 +145,17 @@ def _remove_fixed_features_from_optimization(
         dimension=d,
         eq=True,
     )
+    nonlinear_inequality_constraints = _generate_unfixed_nonlin_constraints(
+        constraints=nonlinear_inequality_constraints,
+        fixed_features=fixed_features,
+        dimension=d,
+    )
     return _NoFixedFeatures(
         acquisition_function=acquisition_function,
         initial_conditions=initial_conditions,
         lower_bounds=lower_bounds,
         upper_bounds=upper_bounds,
         inequality_constraints=inequality_constraints,
         equality_constraints=equality_constraints,
+        nonlinear_inequality_constraints=nonlinear_inequality_constraints,
     )
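The effect of the new plumbing: a constraint defined on the full d-dimensional space is rewrapped so it can be evaluated on the non-fixed coordinates only. A small worked example (the constraint c is hypothetical):

import torch
from botorch.optim.parameter_constraints import _generate_unfixed_nonlin_constraints

def c(x):
    # hypothetical constraint on the full 3-dim space; feasible iff c(x) >= 0
    return 4.0 - x.sum(dim=-1)

(c_reduced,) = _generate_unfixed_nonlin_constraints(
    constraints=[c], fixed_features={1: 2.0}, dimension=3
)
z = torch.tensor([[1.0, 0.5]])       # point in the reduced 2-dim space
x = torch.tensor([[1.0, 2.0, 0.5]])  # same point with feature 1 re-inserted
assert torch.allclose(c_reduced(z), c(x))  # both give 4 - 3.5 = 0.5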
37 changes: 36 additions & 1 deletion botorch/optim/parameter_constraints.py
@@ -312,13 +312,48 @@ def _make_linear_constraints(
     return constraints


+def _generate_unfixed_nonlin_constraints(
+    constraints: Optional[List[Callable]],
+    fixed_features: Dict[int, float],
+    dimension: int,
+) -> Optional[List[Callable]]:
SebastianAment (Contributor): Two subjective suggestions to make this quicker to parse:

  1. How about _gen_nonlin_constraints_of_variable_features? Could apply the same convention to the linear case below. (cc @Balandat)
  2. Could you add a doc-string here, something like "Given a dictionary of fixed_features, returns a list of non-linear constraints on the variable (non-fixed) features."

jduerholt (Contributor Author): I added a docstring. Concerning renaming: should I do it, or should it be done in a separate PR for both methods (nonlinear and linear) to keep it consistent?

SebastianAment (Contributor, Jul 6, 2023): Thank you! If you're up for it, you can do both in this commit. The only mentions of _generate_unfixed_lin_constraints are in:

  • generation/utils.py,
  • optim/parameter_constraints.py, and
  • test/optim/parameter_constraints.py.

A quick search and replace in these files should do it.

jduerholt (Contributor Author): Hmm, but do you really prefer the new name? What do you not like about _generate_unfixed_lin_constraints?

jduerholt (Contributor Author): But of course, I can do it ;)

SebastianAment (Contributor): _generate_unfixed_(non)lin_constraints makes it sound like the constraints are "unfixed", whereas it is the features that are. But let's keep it then, especially since the docstring makes it clear now. Thanks for adding it!
+    # If constraints is None or an empty list, then return itself
+    if not constraints:
+        return constraints
+
+    selector = []
+    idx_X, idx_f = 0, dimension - len(fixed_features)
+    for i in range(dimension):
+        if i in fixed_features.keys():
+            selector.append(idx_f)
+            idx_f += 1
+        else:
+            selector.append(idx_X)
+            idx_X += 1
+
+    values = torch.tensor([v for v in fixed_features.values()], dtype=torch.double)
+
+    new_constraints = []
+
+    def _wrap_nlc(nlc: Callable) -> Callable:
+        def new_nlc(X: Tensor) -> Tensor:
+            ivalues = values.to(X).expand(*X.shape[:-1], len(fixed_features))
+            X_perm = torch.cat([X, ivalues], dim=-1)
+            return nlc(X_perm[..., selector])
+
+        return new_nlc
+
+    for nlc in constraints:
+        new_constraints.append(_wrap_nlc(nlc=nlc))
+    return new_constraints


 def _generate_unfixed_lin_constraints(
     constraints: Optional[List[Tuple[Tensor, Tensor, float]]],
     fixed_features: Dict[int, float],
     dimension: int,
     eq: bool,
 ) -> Optional[List[Tuple[Tensor, Tensor, float]]]:

     # If constraints is None or an empty list, then return itself
     if not constraints:
         return constraints
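The selector bookkeeping is the crux of the new helper: the wrapped constraint receives the reduced point, concatenates the fixed values at the end, and then permutes the columns back into their original positions. A standalone trace of that index logic (pure Python, mirroring the loop above):

# dimension=4 with features 1 and 3 fixed: the reduced point keeps features
# [0, 2]; the fixed values land at positions [2, 3] after concatenation.
dimension, fixed_features = 4, {1: 0.0, 3: 1.0}
selector, idx_X, idx_f = [], 0, dimension - len(fixed_features)
for i in range(dimension):
    if i in fixed_features:
        selector.append(idx_f)
        idx_f += 1
    else:
        selector.append(idx_X)
        idx_X += 1
assert selector == [0, 2, 1, 3]
# cat([z0, z2], [v1, v3]) indexed by selector -> [z0, v1, z2, v3],
# i.e. the original 4-dim layout the wrapped constraint expects.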
1 change: 1 addition & 0 deletions test/generation/test_utils.py
@@ -105,6 +105,7 @@ def check_cons(old_cons, new_cons):
             upper_bounds=upper_bounds,
             inequality_constraints=inequality_constraints,
             equality_constraints=equality_constraints,
+            nonlinear_inequality_constraints=None,
         )
         self.assertIsInstance(
             _no_ff.acquisition_function, FixedFeatureAcquisitionFunction
42 changes: 26 additions & 16 deletions test/optim/test_optimize.py
@@ -862,6 +862,32 @@ def nlc4(x):
             torch.allclose(acq_value, torch.tensor(2.45, **tkwargs), atol=1e-3)
         )

+        with torch.random.fork_rng():
+            torch.manual_seed(0)
+            batch_initial_conditions = torch.rand(num_restarts, 1, 3, **tkwargs)
+            batch_initial_conditions[..., 0] = 2
+
+        # test with fixed features
+        candidates, acq_value = optimize_acqf(
+            acq_function=mock_acq_function,
+            bounds=bounds,
+            q=1,
+            nonlinear_inequality_constraints=[nlc1, nlc2],
+            batch_initial_conditions=batch_initial_conditions,
+            num_restarts=num_restarts,
+            fixed_features={0: 2},
Balandat (Contributor): Hmm, so with this setup this test may pass even if fixing the features doesn't work (given that it only narrows this to a specific solution of the previous solution set). Can you use a different value here? It might also make sense to use a different mock acquisition function so that the optimizer is unique...

jduerholt (Contributor Author): I will have a deeper look at the test. All this mocking is sometimes a bit hard for me to understand ;)

Balandat (Contributor, Jul 3, 2023): I don't think there is actually any mocking going on here - it's just that mock_acq_function is a SquaredAcquisitionFunction rather than a true acquisition function. If you don't want to change that, one thing you could do would be to just replace one of the constraints to result in a different optimum / optimizer.
+        )
+        self.assertEqual(candidates[0, 0], 2.0)
+        self.assertTrue(
+            torch.allclose(
+                torch.sort(candidates).values,
+                torch.tensor([[0, 2, 2]], **tkwargs),
+            )
+        )
+        self.assertTrue(
+            torch.allclose(acq_value, torch.tensor(2.8284, **tkwargs), atol=1e-3)
+        )
+
         # Test that an ic_generator object with the same API as
         # gen_batch_initial_conditions returns candidates of the
         # required shape.
         )
         self.assertEqual(candidates.size(), torch.Size([1, 3]))

-        # Make sure fixed features aren't supported
-        with self.assertRaisesRegex(
-            NotImplementedError,
-            "Fixed features are not supported when non-linear inequality "
-            "constraints are given.",
-        ):
-            optimize_acqf(
-                acq_function=mock_acq_function,
-                bounds=bounds,
-                q=1,
-                nonlinear_inequality_constraints=[nlc1, nlc2, nlc3, nlc4],
-                batch_initial_conditions=batch_initial_conditions,
-                num_restarts=num_restarts,
-                fixed_features={0: 0.1},
-            )

         # Constraints must be passed in as lists
         with self.assertRaisesRegex(
             ValueError,
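Mirroring the updated test, here is a rough end-to-end sketch of the newly supported combination via optimize_acqf (the model, data, and constraint are hypothetical; nonlinear constraints still require explicit, feasible batch_initial_conditions):

import torch
from botorch.acquisition import ExpectedImprovement
from botorch.fit import fit_gpytorch_mll
from botorch.models import SingleTaskGP
from botorch.optim import optimize_acqf
from gpytorch.mlls import ExactMarginalLogLikelihood

# toy model on random data
train_X = torch.rand(8, 3, dtype=torch.double)
train_Y = train_X.sum(dim=-1, keepdim=True)
model = SingleTaskGP(train_X, train_Y)
fit_gpytorch_mll(ExactMarginalLogLikelihood(model.likelihood, model))
acqf = ExpectedImprovement(model, best_f=train_Y.max())

def nlc(x):
    # feasible iff x0 + x1 >= 1 (callable(x) >= 0 convention)
    return x[..., 0] + x[..., 1] - 1.0

bounds = torch.stack([torch.zeros(3), torch.ones(3)]).to(torch.double)
ics = torch.rand(4, 1, 3, dtype=torch.double)
ics[..., 0] = 0.5  # match the fixed feature
ics[..., 1] = 0.9  # keep the initial conditions feasible

candidate, value = optimize_acqf(
    acq_function=acqf,
    bounds=bounds,
    q=1,
    num_restarts=4,
    nonlinear_inequality_constraints=[nlc],
    batch_initial_conditions=ics,
    fixed_features={0: 0.5},  # now combines with the nonlinear constraint
)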
37 changes: 37 additions & 0 deletions test/optim/test_parameter_constraints.py
@@ -12,6 +12,7 @@
 from botorch.optim.parameter_constraints import (
     _arrayify,
     _generate_unfixed_lin_constraints,
+    _generate_unfixed_nonlin_constraints,
     _make_linear_constraints,
     eval_lin_constraint,
     lin_constraint_jac,

@@ -215,6 +216,42 @@ def test_make_scipy_linear_constraints_unsupported(self):
                 equality_constraints=[(indices, coefficients, 1.0)],
             )

+    def test_generate_unfixed_nonlin_constraints(self):
+        def nlc1(x):
+            return 4 - x.sum(dim=-1)
+
+        def nlc2(x):
+            return x[..., 0] - 1
+
+        # first test with one constraint
+        new_constraints = _generate_unfixed_nonlin_constraints(
+            constraints=[nlc1], fixed_features={1: 2.0}, dimension=3
+        )
+        self.assertAllClose(
+            nlc1(torch.tensor([[4.0, 2.0, 2.0]], device=self.device)),
+            new_constraints[0](torch.tensor([[4.0, 2.0]], device=self.device)),
+        )
+        # test with several constraints
+        constraints = [nlc1, nlc2]
+        new_constraints = _generate_unfixed_nonlin_constraints(
+            constraints=constraints, fixed_features={1: 2.0}, dimension=3
+        )
+        for i in range(2):
+            self.assertAllClose(
+                constraints[i](torch.tensor([[4.0, 2.0, 2.0]], device=self.device)),
+                new_constraints[i](torch.tensor([[4.0, 2.0]], device=self.device)),
+            )
+        # test with several constraints and two fixed features
+        constraints = [nlc1, nlc2]
+        new_constraints = _generate_unfixed_nonlin_constraints(
+            constraints=constraints, fixed_features={1: 2.0, 2: 1.0}, dimension=3
+        )
+        for i in range(2):
+            self.assertAllClose(
+                constraints[i](torch.tensor([[4.0, 2.0, 1.0]], device=self.device)),
+                new_constraints[i](torch.tensor([[4.0]], device=self.device)),
+            )
+
     def test_generate_unfixed_lin_constraints(self):
         # Case 1: some fixed features are in the indices
         indices = [