Removing soft_eval_constraint
Summary: `soft_eval_constraint` has been deprecated for 7 months. This commit removes the function before the next BoTorch release.

Differential Revision: D54207128
SebastianAment authored and facebook-github-bot committed Feb 26, 2024
1 parent 02e8014 commit dd11c49
Showing 2 changed files with 0 additions and 38 deletions.
28 changes: 0 additions & 28 deletions botorch/utils/objective.py
@@ -10,8 +10,6 @@

from __future__ import annotations

import warnings

from typing import Callable, List, Optional, Union

import torch
@@ -182,32 +180,6 @@ def compute_smoothed_feasibility_indicator(
return is_feasible if log else is_feasible.exp()


# TODO: deprecate this function
def soft_eval_constraint(lhs: Tensor, eta: float = 1e-3) -> Tensor:
r"""Element-wise evaluation of a constraint in a 'soft' fashion
`value(x) = 1 / (1 + exp(x / eta))`
Args:
lhs: The left hand side of the constraint `lhs <= 0`.
eta: The temperature parameter of the softmax function. As eta
decreases, this approximates the Heaviside step function.
Returns:
Element-wise 'soft' feasibility indicator of the same shape as `lhs`.
For each element `x`, `value(x) -> 0` as `x` becomes positive, and
`value(x) -> 1` as x becomes negative.
"""
warnings.warn(
"`soft_eval_constraint` is deprecated. Please consider `torch.utils.sigmoid` "
+ "with its `fat` and `log` options to compute feasibility indicators.",
DeprecationWarning,
)
if eta <= 0:
raise ValueError("eta must be positive.")
return torch.sigmoid(-lhs / eta)


def apply_constraints(
obj: Tensor,
constraints: List[Callable[[Tensor], Tensor]],
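For reference, the quantity the removed helper computed is a temperature-scaled sigmoid, `value(x) = 1 / (1 + exp(x / eta)) = sigmoid(-x / eta)`, so it can be reproduced with plain PyTorch. A minimal sketch; the tensor values and the name `lhs_values` are illustrative only, not part of the BoTorch API:

import torch

# Constraint values of the form `lhs <= 0`; negative entries are feasible.
lhs_values = torch.tensor([-2.0, 0.0, 2.0])
eta = 1e-3  # temperature: smaller eta approaches a hard step function

# value(x) = 1 / (1 + exp(x / eta)) == sigmoid(-x / eta)
soft_indicator = torch.sigmoid(-lhs_values / eta)
print(soft_indicator)  # ~[1.0, 0.5, 0.0]: feasible -> 1, boundary -> 0.5, infeasible -> 0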
10 changes: 0 additions & 10 deletions test/utils/test_objective.py
@@ -10,7 +10,6 @@
from botorch.utils.objective import (
compute_feasibility_indicator,
compute_smoothed_feasibility_indicator,
soft_eval_constraint,
)
from botorch.utils.testing import BotorchTestCase
from torch import Tensor
@@ -75,15 +74,6 @@ def test_apply_constraints(self):
eta=0.0,
)

# soft_eval_constraint is not in the path of apply_constraints, adding this test
# for coverage.
with self.assertRaisesRegex(ValueError, "eta must be positive."):
soft_eval_constraint(lhs=obj, eta=0.0)
ind = soft_eval_constraint(lhs=ones_f(samples), eta=1e-6)
self.assertAllClose(ind, torch.zeros_like(ind))
ind = soft_eval_constraint(lhs=-ones_f(samples), eta=1e-6)
self.assertAllClose(ind, torch.ones_like(ind))

def test_apply_constraints_multi_output(self):
# nonnegative objective, one constraint
tkwargs = {"device": self.device}
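The behavioral coverage removed above (indicator near 1 for feasible inputs, near 0 for infeasible ones) can be expressed against the retained `compute_smoothed_feasibility_indicator` instead. A hedged sketch only: the keyword names `constraints`, `samples`, and `eta` follow the usage visible in this diff's context, but exact signatures and defaults may differ between BoTorch versions.

import torch
from botorch.utils.objective import compute_smoothed_feasibility_indicator

# Illustrative `sample_shape x q x m` objective samples.
samples = torch.zeros(4, 3, 2)

# Constraint callables return values that are negative when feasible.
def always_feasible(Y):
    return -torch.ones(Y.shape[:-1])

def always_infeasible(Y):
    return torch.ones(Y.shape[:-1])

ind_feas = compute_smoothed_feasibility_indicator(
    constraints=[always_feasible], samples=samples, eta=1e-6
)
ind_infeas = compute_smoothed_feasibility_indicator(
    constraints=[always_infeasible], samples=samples, eta=1e-6
)
assert (ind_feas > 0.99).all() and (ind_infeas < 0.01).all()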
