Efficient NChooseKs in BO #156

Merged (21 commits, Apr 24, 2023)
59 changes: 58 additions & 1 deletion bofire/benchmarks/single.py
@@ -1,13 +1,16 @@
import math
from typing import Optional

import numpy as np
import pandas as pd
import torch
from botorch.test_functions import Hartmann as botorch_hartmann
from botorch.test_functions.synthetic import Branin as torchBranin
from pydantic.types import PositiveInt

from bofire.benchmarks.benchmark import Benchmark
from bofire.data_models.domain.api import Domain, Inputs, Outputs
from bofire.data_models.constraints.api import NChooseKConstraint
from bofire.data_models.domain.api import Constraints, Domain, Inputs, Outputs
from bofire.data_models.features.api import (
CategoricalDescriptorInput,
CategoricalInput,
@@ -156,6 +159,60 @@ def get_optima(self) -> pd.DataFrame:
)


class Hartmann(Benchmark):
def __init__(self, dim: int = 6, allowed_k: Optional[int] = None) -> None:
super().__init__()
self._domain = Domain(
input_features=Inputs(
features=[
ContinuousInput(key=f"x_{i}", bounds=(0, 1)) for i in range(dim)
]
),
output_features=Outputs(
features=[ContinuousOutput(key="y", objective=MinimizeObjective())]
),
constraints=Constraints(
constraints=[
NChooseKConstraint(
features=[f"x_{i}" for i in range(dim)],
min_count=0,
max_count=allowed_k,
none_also_valid=True,
)
]
)
if allowed_k
else Constraints(),
)
self._hartmann = botorch_hartmann(dim=dim)

def get_optima(self) -> pd.DataFrame:
if self.dim != 6:
raise ValueError("Only available for dim==6.")
if len(self.domain.constraints) > 0:
raise ValueError("Not defined for NChooseK use case.")
return pd.DataFrame(
columns=[f"x_{i}" for i in range(self.dim)] + ["y"],
data=[[0.20169, 0.150011, 0.476874, 0.275332, 0.311652, 0.6573, -3.32237]],
)

@property
def dim(self) -> int:
return len(self.domain.inputs)

def _f(self, candidates: pd.DataFrame) -> pd.DataFrame:
return pd.DataFrame(
{
"y": self._hartmann(
torch.from_numpy(
candidates[[f"x_{i}" for i in range(self.dim)]].values
)
),
"valid_y": [1 for _ in range(len(candidates))],
}
)


class Branin(Benchmark):
def __init__(self) -> None:
self._domain = Domain(
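For context, a minimal usage sketch of the new Hartmann benchmark with an NChooseK constraint. This is not part of the diff; it assumes the existing Benchmark.f helper and hand-built candidates (a real run would take them from a strategy that honours the constraint):

import pandas as pd

from bofire.benchmarks.single import Hartmann

# six inputs, of which at most three may be non-zero
bench = Hartmann(dim=6, allowed_k=3)

# hypothetical candidates: three active inputs, the rest fixed to zero
candidates = pd.DataFrame({f"x_{i}": [0.2 if i < 3 else 0.0] for i in range(6)})

results = bench.f(candidates)  # DataFrame with columns "y" and "valid_y", per _f above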
43 changes: 29 additions & 14 deletions bofire/data_models/domain/domain.py
@@ -597,7 +597,7 @@ def describe_experiments(self, experiments: pd.DataFrame) -> pd.DataFrame:
)

def validate_candidates(
self, candidates: pd.DataFrame, only_inputs: bool = False, tol: float = 1e-6
self, candidates: pd.DataFrame, only_inputs: bool = False, tol: float = 1e-5
) -> pd.DataFrame:
"""Method to check the validty of porposed candidates

Expand All @@ -622,26 +622,41 @@ def validate_candidates(
self.input_features.validate_inputs(candidates)
# check if all constraints are fulfilled
if not self.cnstrs.is_fulfilled(candidates, tol=tol).all():
raise ValueError("Constraints not fulfilled.")
raise ValueError(f"Constraints not fulfilled: {candidates}")
# for each continuous output feature with an attached objective object
if not only_inputs:
assert isinstance(self.output_features, Outputs)
for key in self.output_features.get_keys_by_objective(Objective):
# check that pred, sd, and des cols are specified and numerical
for col in [f"{key}_pred", f"{key}_sd", f"{key}_des"]:
if col not in candidates:
raise ValueError(f"missing column {col}")
if (not is_numeric(candidates[col])) and (
not candidates[col].isnull().to_numpy().all()
):
raise ValueError(
f"not all values of output feature `{key}` are numerical"

cols = list(
itertools.chain.from_iterable(
[
[f"{key}_pred", f"{key}_sd", f"{key}_des"]
for key in self.output_features.get_keys_by_objective(Objective)
]
+ [
[f"{key}_pred", f"{key}_sd"]
for key in self.output_features.get_keys_by_objective(
excludes=Objective, includes=None # type: ignore
)
]
)
)

# check that pred, sd, and des cols are specified and numerical
for col in cols:
if col not in candidates:
raise ValueError(f"missing column {col}")
if (not is_numeric(candidates[col])) and (
not candidates[col].isnull().to_numpy().all()
):
raise ValueError(f"not all values of column `{col}` are numerical")

# validate no additional cols exist
if_count = len(self.get_features(Input))
of_count = len(self.output_features.get_keys_by_objective(Objective))
of_count = len(self.outputs.get_by_objective(includes=Objective))
of_count_w = len(self.outputs.get_by_objective(excludes=Objective, includes=None)) # type: ignore
# input features, prediction, standard deviation and reward for each output feature, 3 additional useful infos: reward, acquisition function, strategy
if len(candidates.columns) != if_count + 3 * of_count:
if len(candidates.columns) != if_count + 3 * of_count + 2 * of_count_w:
raise ValueError("additional columns found")
return candidates

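To make the adjusted column-count check concrete, here is a hypothetical candidate frame (keys invented for illustration) with two inputs, one output y carrying an objective and one output aux without one:

import pandas as pd

candidates = pd.DataFrame(
    {
        "x_1": [0.1],
        "x_2": [0.9],       # if_count = 2
        "y_pred": [1.0],
        "y_sd": [0.05],
        "y_des": [0.8],     # 3 * of_count with of_count = 1
        "aux_pred": [2.0],
        "aux_sd": [0.2],    # 2 * of_count_w with of_count_w = 1
    }
)
# len(candidates.columns) == 2 + 3 * 1 + 2 * 1 == 7, so "additional columns found" is not raised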
21 changes: 20 additions & 1 deletion bofire/data_models/strategies/predictives/botorch.py
@@ -1,7 +1,12 @@
from typing import Optional
from typing import Optional, Type

from pydantic import PositiveInt, root_validator, validator

from bofire.data_models.constraints.api import (
Constraint,
NonlinearEqualityConstraint,
NonlinearInequalityConstraint,
)
from bofire.data_models.domain.api import Domain, Outputs
from bofire.data_models.enum import CategoricalEncodingEnum, CategoricalMethodEnum
from bofire.data_models.features.api import CategoricalDescriptorInput, CategoricalInput
@@ -26,6 +31,20 @@ class BotorchStrategy(PredictiveStrategy):
discrete_method: CategoricalMethodEnum = CategoricalMethodEnum.EXHAUSTIVE
surrogate_specs: Optional[BotorchSurrogates] = None

@classmethod
def is_constraint_implemented(cls, my_type: Type[Constraint]) -> bool:
"""Method to check if a specific constraint type is implemented for the strategy

Args:
my_type (Type[Constraint]): Constraint class

Returns:
bool: True if the constraint type is valid for the strategy chosen, False otherwise
"""
if my_type in [NonlinearInequalityConstraint, NonlinearEqualityConstraint]:
return False
return True

@validator("num_sobol_samples")
def validate_num_sobol_samples(cls, v):
if is_power_of_two(v) is False:
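The practical effect is that every BotorchStrategy subclass inherits this check, so NChooseK constraints are now reported as supported while nonlinear constraints are not. A quick sketch (classmethod calls only, no strategy instance required):

from bofire.data_models.constraints.api import (
    NChooseKConstraint,
    NonlinearEqualityConstraint,
)
from bofire.data_models.strategies.predictives.botorch import BotorchStrategy

assert BotorchStrategy.is_constraint_implemented(NChooseKConstraint) is True
assert BotorchStrategy.is_constraint_implemented(NonlinearEqualityConstraint) is False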
15 changes: 0 additions & 15 deletions bofire/data_models/strategies/predictives/qehvi.py
@@ -2,7 +2,6 @@

from pydantic import validator

from bofire.data_models.constraints.api import Constraint, NChooseKConstraint
from bofire.data_models.features.api import Feature
from bofire.data_models.objectives.api import (
MaximizeObjective,
@@ -33,20 +32,6 @@ def validate_ref_point(cls, v, values):
)
return v

@classmethod
def is_constraint_implemented(cls, my_type: Type[Constraint]) -> bool:
"""Method to check if a specific constraint type is implemented for the strategy

Args:
my_type (Type[Constraint]): Constraint class

Returns:
bool: True if the constraint type is valid for the strategy chosen, False otherwise
"""
if my_type == NChooseKConstraint:
return False
return True

@classmethod
def is_feature_implemented(cls, my_type: Type[Feature]) -> bool:
"""Method to check if a specific feature type is implemented for the strategy
2 changes: 2 additions & 0 deletions bofire/data_models/strategies/predictives/qnehvi.py
@@ -3,6 +3,7 @@
from pydantic import confloat

from bofire.data_models.objectives.api import (
CloseToTargetObjective,
MaximizeObjective,
MaximizeSigmoidObjective,
MinimizeObjective,
@@ -33,4 +34,5 @@ def is_objective_implemented(cls, my_type: Type[Objective]) -> bool:
MinimizeSigmoidObjective,
MaximizeSigmoidObjective,
TargetObjective,
CloseToTargetObjective,
]
9 changes: 2 additions & 7 deletions bofire/data_models/strategies/predictives/qparego.py
@@ -1,8 +1,8 @@
from typing import Literal, Type

from bofire.data_models.constraints.api import Constraint, NChooseKConstraint
from bofire.data_models.features.api import Feature
from bofire.data_models.objectives.api import (
CloseToTargetObjective,
MaximizeObjective,
MaximizeSigmoidObjective,
MinimizeObjective,
@@ -18,12 +18,6 @@
class QparegoStrategy(MultiobjectiveStrategy):
type: Literal["QparegoStrategy"] = "QparegoStrategy"

@classmethod
def is_constraint_implemented(cls, my_type: Type[Constraint]) -> bool:
if my_type == NChooseKConstraint:
return False
return True

@classmethod
def is_feature_implemented(cls, my_type: Type[Feature]) -> bool:
return True
@@ -36,6 +30,7 @@ def is_objective_implemented(cls, my_type: Type[Objective]) -> bool:
TargetObjective,
MinimizeSigmoidObjective,
MaximizeSigmoidObjective,
CloseToTargetObjective,
]:
return False
return True
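Taken together with the qnehvi.py change above, CloseToTargetObjective appears to be registered as supported for QNEHVI but added to QParEGO's early-return list, so QParEGO still reports it as unsupported. A hedged check, assuming the class names QnehviStrategy and QparegoStrategy as defined in the touched modules:

from bofire.data_models.objectives.api import CloseToTargetObjective
from bofire.data_models.strategies.predictives.qnehvi import QnehviStrategy
from bofire.data_models.strategies.predictives.qparego import QparegoStrategy

QnehviStrategy.is_objective_implemented(CloseToTargetObjective)   # expected True after this PR
QparegoStrategy.is_objective_implemented(CloseToTargetObjective)  # expected False: caught by the early return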
19 changes: 0 additions & 19 deletions bofire/data_models/strategies/predictives/sobo.py
@@ -3,11 +3,6 @@
from pydantic import validator

from bofire.data_models.acquisition_functions.api import AnyAcquisitionFunction
from bofire.data_models.constraints.api import (
Constraint,
NonlinearEqualityConstraint,
NonlinearInequalityConstraint,
)
from bofire.data_models.features.api import Feature
from bofire.data_models.objectives.api import BotorchConstrainedObjective, Objective
from bofire.data_models.strategies.predictives.botorch import BotorchStrategy
@@ -16,20 +11,6 @@
class SoboBaseStrategy(BotorchStrategy):
acquisition_function: AnyAcquisitionFunction

@classmethod
def is_constraint_implemented(cls, my_type: Type[Constraint]) -> bool:
"""Method to check if a specific constraint type is implemented for the strategy

Args:
my_type (Type[Constraint]): Constraint class

Returns:
bool: True if the constraint type is valid for the strategy chosen, False otherwise
"""
if my_type in [NonlinearInequalityConstraint, NonlinearEqualityConstraint]:
return False
return True

@classmethod
def is_feature_implemented(cls, my_type: Type[Feature]) -> bool:
"""Method to check if a specific feature type is implemented for the strategy