From 1682ccddc4d764c87d9785711b59e991c82155bc Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Johannes=20P=2E=20D=C3=BCrholt?=
Date: Sun, 16 Apr 2023 17:44:22 +0200
Subject: [PATCH 1/3] remove blocking for q > 1

---
 botorch/generation/gen.py              | 6 ------
 botorch/optim/parameter_constraints.py | 2 +-
 2 files changed, 1 insertion(+), 7 deletions(-)

diff --git a/botorch/generation/gen.py b/botorch/generation/gen.py
index 83a3944acf..a27ec398e2 100644
--- a/botorch/generation/gen.py
+++ b/botorch/generation/gen.py
@@ -213,12 +213,6 @@ def f_np_wrapper(x: np.ndarray, f: Callable):
         return fval
 
     if nonlinear_inequality_constraints:
-        # Make sure `batch_limit` is 1 for now.
-        if not (len(shapeX) == 3 and shapeX[:2] == torch.Size([1, 1])):
-            raise ValueError(
-                "`batch_limit` must be 1 when non-linear inequality constraints "
-                "are given."
-            )
         constraints += make_scipy_nonlinear_inequality_constraints(
             nonlinear_inequality_constraints=nonlinear_inequality_constraints,
             f_np_wrapper=f_np_wrapper,
diff --git a/botorch/optim/parameter_constraints.py b/botorch/optim/parameter_constraints.py
index 55fce9ffea..b402e36ad4 100644
--- a/botorch/optim/parameter_constraints.py
+++ b/botorch/optim/parameter_constraints.py
@@ -381,7 +381,7 @@ def _make_f_and_grad_nonlinear_inequality_constraints(
     """
     Create callables for objective + grad for the nonlinear inequality constraints.
     The Scipy interface requires specifying separate callables and we use caching to
-    avoid evaluating the same input twice. This caching onlh works if
+    avoid evaluating the same input twice. This caching only works if
     the returned functions are evaluated on the same input in immediate
     sequence (i.e., calling `f_obj(X_1)`, `f_grad(X_1)` will result in a
     single forward pass, while `f_obj(X_1)`, `f_grad(X_2)`, `f_obj(X_1)`

From 52c1777eaa0bf762d812d51f5d1faf213d8f2bf5 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Johannes=20P=2E=20D=C3=BCrholt?=
Date: Sun, 16 Apr 2023 17:50:31 +0200
Subject: [PATCH 2/3] comment out test on batch_limit

---
 test/optim/test_optimize.py | 28 ++++++++++++++--------------
 1 file changed, 14 insertions(+), 14 deletions(-)

diff --git a/test/optim/test_optimize.py b/test/optim/test_optimize.py
index 19a0fec666..4b4422b5eb 100644
--- a/test/optim/test_optimize.py
+++ b/test/optim/test_optimize.py
@@ -850,20 +850,20 @@ def nlc4(x):
                 batch_initial_conditions=4 * torch.ones(1, 1, 3, **tkwargs),
             )
             # Explicitly setting batch_limit to be >1 should raise
-            with self.assertRaisesRegex(
-                ValueError,
-                "`batch_limit` must be 1 when non-linear inequality constraints "
-                "are given.",
-            ):
-                optimize_acqf(
-                    acq_function=mock_acq_function,
-                    bounds=bounds,
-                    q=1,
-                    nonlinear_inequality_constraints=[nlc1],
-                    batch_initial_conditions=torch.rand(5, 1, 3, **tkwargs),
-                    num_restarts=5,
-                    options={"batch_limit": 5},
-                )
+            # with self.assertRaisesRegex(
+            #     ValueError,
+            #     "`batch_limit` must be 1 when non-linear inequality constraints "
+            #     "are given.",
+            # ):
+            #     optimize_acqf(
+            #         acq_function=mock_acq_function,
+            #         bounds=bounds,
+            #         q=1,
+            #         nonlinear_inequality_constraints=[nlc1],
+            #         batch_initial_conditions=torch.rand(5, 1, 3, **tkwargs),
+            #         num_restarts=5,
+            #         options={"batch_limit": 5},
+            #     )
             # If there are non-linear inequality constraints an initial condition
             # generator object `ic_generator` must be supplied.
             with self.assertRaisesRegex(

From 24a778deeb438589576d8f9e51d0c66ddced0700 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Johannes=20P=2E=20D=C3=BCrholt?=
Date: Mon, 24 Apr 2023 15:00:20 +0200
Subject: [PATCH 3/3] add _make_nonlinear_constraints

---
 botorch/generation/gen.py              |  1 +
 botorch/optim/parameter_constraints.py | 51 +++++++++++++++++++++----
 2 files changed, 43 insertions(+), 9 deletions(-)

diff --git a/botorch/generation/gen.py b/botorch/generation/gen.py
index a27ec398e2..e42225fd46 100644
--- a/botorch/generation/gen.py
+++ b/botorch/generation/gen.py
@@ -217,6 +217,7 @@ def f_np_wrapper(x: np.ndarray, f: Callable):
             nonlinear_inequality_constraints=nonlinear_inequality_constraints,
             f_np_wrapper=f_np_wrapper,
             x0=x0,
+            shapeX=shapeX,
         )
 
     x0 = _arrayify(x0)
diff --git a/botorch/optim/parameter_constraints.py b/botorch/optim/parameter_constraints.py
index b402e36ad4..88565aa1d4 100644
--- a/botorch/optim/parameter_constraints.py
+++ b/botorch/optim/parameter_constraints.py
@@ -312,6 +312,33 @@ def _make_linear_constraints(
     return constraints
 
 
+def _make_nonlinear_constraints(
+    f_np_wrapper: Callable, nlc: Callable, shapeX: torch.Size
+) -> List:
+    shapeX = _validate_linear_constraints_shape_input(shapeX)
+    b, q, _ = shapeX
+    constraints = []
+
+    def get_interpoint_constraint(b: int, q: int, nlc: Callable) -> Callable:
+        return lambda x: nlc(x[b, q])
+
+    for i in range(b):
+        for j in range(q):
+            f_obj, f_grad = _make_f_and_grad_nonlinear_inequality_constraints(
+                f_np_wrapper=f_np_wrapper,
+                nlc=get_interpoint_constraint(b=i, q=j, nlc=nlc),
+            )
+            constraints.append(
+                {
+                    "type": "ineq",
+                    "fun": f_obj,
+                    "jac": f_grad,
+                }
+            )
+
+    return constraints
+
+
 def _generate_unfixed_lin_constraints(
     constraints: Optional[List[Tuple[Tensor, Tensor, float]]],
     fixed_features: Dict[int, float],
@@ -415,6 +442,7 @@ def make_scipy_nonlinear_inequality_constraints(
     nonlinear_inequality_constraints: List[Callable],
     f_np_wrapper: Callable,
     x0: Tensor,
+    shapeX: torch.Size,
 ) -> List[Dict]:
     r"""Generate Scipy nonlinear inequality constraints from callables.
 
@@ -447,14 +475,19 @@ def make_scipy_nonlinear_inequality_constraints(
                 "`batch_initial_conditions` must satisfy the non-linear inequality "
                 "constraints."
             )
-        f_obj, f_grad = _make_f_and_grad_nonlinear_inequality_constraints(
-            f_np_wrapper=f_np_wrapper, nlc=nlc
-        )
-        scipy_nonlinear_inequality_constraints.append(
-            {
-                "type": "ineq",
-                "fun": f_obj,
-                "jac": f_grad,
-            }
+
+        scipy_nonlinear_inequality_constraints += _make_nonlinear_constraints(
+            f_np_wrapper=f_np_wrapper, nlc=nlc, shapeX=shapeX
         )
+
+        # f_obj, f_grad = _make_f_and_grad_nonlinear_inequality_constraints(
+        #     f_np_wrapper=f_np_wrapper, nlc=nlc
+        # )
+        # scipy_nonlinear_inequality_constraints.append(
+        #     {
+        #         "type": "ineq",
+        #         "fun": f_obj,
+        #         "jac": f_grad,
+        #     }
+        # )
     return scipy_nonlinear_inequality_constraints
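Illustrative sketch (not part of the patches above): with the `batch_limit` check removed in patch 1 and constraints expanded per candidate point by `_make_nonlinear_constraints` in patch 3, a call like the one below, with q > 1 and an explicit `batch_limit` > 1, should no longer raise. The toy `SingleTaskGP` model, the `nlc` constraint, and all tensor shapes here are assumptions chosen for demonstration; only `optimize_acqf` and its `nonlinear_inequality_constraints` / `batch_initial_conditions` arguments come from BoTorch itself.

import torch
from botorch.acquisition import qExpectedImprovement
from botorch.fit import fit_gpytorch_mll
from botorch.models import SingleTaskGP
from botorch.optim import optimize_acqf
from gpytorch.mlls import ExactMarginalLogLikelihood

# Toy model on the 3-d unit cube (illustrative assumption, not from the patch).
train_X = torch.rand(10, 3, dtype=torch.double)
train_Y = train_X.sum(dim=-1, keepdim=True)
model = SingleTaskGP(train_X, train_Y)
fit_gpytorch_mll(ExactMarginalLogLikelihood(model.likelihood, model))


def nlc(x):
    # Feasible iff x_0 + x_1 <= 1. Each callable receives a single d-dim point
    # (see get_interpoint_constraint: nlc(x[b, q])) and must return >= 0 when
    # that point is feasible.
    return 1.0 - x[..., 0] - x[..., 1]


bounds = torch.tensor([[0.0, 0.0, 0.0], [1.0, 1.0, 1.0]], dtype=torch.double)

candidates, value = optimize_acqf(
    acq_function=qExpectedImprovement(model=model, best_f=train_Y.max()),
    bounds=bounds,
    q=2,  # q > 1: previously blocked by the removed ValueError
    num_restarts=4,
    nonlinear_inequality_constraints=[nlc],
    # Initial conditions must already satisfy the constraint
    # (checked in make_scipy_nonlinear_inequality_constraints).
    batch_initial_conditions=0.2 * torch.ones(4, 2, 3, dtype=torch.double),
    options={"batch_limit": 4},  # > 1: previously rejected in gen_candidates_scipy
)

With these settings each of the 4 x 2 candidate points gets its own SLSQP "ineq" entry, which is exactly what `_make_nonlinear_constraints` builds in patch 3.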