diff --git a/optimas/generators/xopt/__init__.py b/optimas/generators/xopt/__init__.py
new file mode 100644
index 00000000..611f86b0
--- /dev/null
+++ b/optimas/generators/xopt/__init__.py
@@ -0,0 +1 @@
+from .nelder_mead import NelderMeadGenerator
diff --git a/optimas/generators/xopt/nelder_mead.py b/optimas/generators/xopt/nelder_mead.py
new file mode 100644
index 00000000..48d935ee
--- /dev/null
+++ b/optimas/generators/xopt/nelder_mead.py
@@ -0,0 +1,100 @@
+"""Defines a Nelder-Mead generator that wraps the xopt implementation."""
+from typing import List, Optional
+
+import pandas as pd
+
+from xopt import VOCS
+from xopt.generators.scipy.neldermead import (
+    NelderMeadGenerator as XoptNelderMeadGenerator,
+)
+
+from optimas.core import (
+    Objective,
+    VaryingParameter,
+    Parameter,
+    Trial,
+)
+from optimas.generators.base import Generator
+
+
+class NelderMeadGenerator(Generator):
+    """Nelder-Mead optimization generator based on xopt.
+
+    Parameters
+    ----------
+    varying_parameters : list of VaryingParameter
+        Input parameters to be optimized.
+    objectives : list of Objective
+        Objectives of the optimization.
+    analyzed_parameters : list of Parameter, optional
+        Additional parameters to analyze during the optimization.
+    save_model : bool, optional
+        Whether to periodically save the generator model to disk.
+    model_save_period : int, optional
+        Number of evaluations between model saves.
+    model_history_dir : str, optional
+        Directory in which the model history is saved.
+    """
+
+    def __init__(
+        self,
+        varying_parameters: List[VaryingParameter],
+        objectives: List[Objective],
+        analyzed_parameters: Optional[List[Parameter]] = None,
+        save_model: Optional[bool] = False,
+        model_save_period: Optional[int] = 5,
+        model_history_dir: Optional[str] = "model_history",
+    ) -> None:
+        super().__init__(
+            varying_parameters=varying_parameters,
+            objectives=objectives,
+            analyzed_parameters=analyzed_parameters,
+            save_model=save_model,
+            model_save_period=model_save_period,
+            model_history_dir=model_history_dir,
+        )
+        self._create_xopt_generator()
+
+    def _ask(self, trials: List[Trial]) -> List[Trial]:
+        """Fill in the parameter values of the requested trials."""
+        n_trials = len(trials)
+        xopt_trials = self.xopt_gen.generate(n_trials)
+        if xopt_trials:
+            for trial, xopt_trial in zip(trials, xopt_trials):
+                trial.parameter_values = [
+                    xopt_trial[var.name] for var in self.varying_parameters
+                ]
+        return trials
+
+    def _tell(self, trials: List[Trial]) -> None:
+        """Feed the results of evaluated trials back to the xopt generator."""
+        for trial in trials:
+            # xopt needs both the variable values and the objective values
+            # (e.g., {"x1": [0.5], "x2": [5.0], "f": [0.5]}) to associate
+            # each result with the corresponding simplex point.
+            data = {}
+            for var, value in zip(
+                self.varying_parameters, trial.parameter_values
+            ):
+                data[var.name] = [value]
+            for oe in trial.objective_evaluations:
+                data[oe.parameter.name] = [oe.value]
+            self.xopt_gen.add_data(pd.DataFrame(data))
+
+    def _create_xopt_generator(self):
+        """Create the wrapped xopt generator, starting at the domain center."""
+        variables = {
+            var.name: [var.lower_bound, var.upper_bound]
+            for var in self.varying_parameters
+        }
+        objectives = {
+            obj.name: "MINIMIZE" if obj.minimize else "MAXIMIZE"
+            for obj in self.objectives
+        }
+        vocs = VOCS(variables=variables, objectives=objectives)
+        # Start the simplex at the center of the parameter space.
+        initial_point = {
+            var.name: (var.lower_bound + var.upper_bound) / 2
+            for var in self.varying_parameters
+        }
+        self.xopt_gen = XoptNelderMeadGenerator(
+            vocs=vocs, initial_point=initial_point
+        )
diff --git a/tests/test_neldermead.py b/tests/test_neldermead.py
new file mode 100644
index 00000000..dcccb35e
--- /dev/null
+++ b/tests/test_neldermead.py
@@ -0,0 +1,61 @@
+import numpy as np
+
+from optimas.explorations import Exploration
+from optimas.generators.xopt import NelderMeadGenerator
+from optimas.evaluators import FunctionEvaluator
+from optimas.core import VaryingParameter, Objective
+
+
+def eval_func(input_params, output_params):
+    """Evaluation function for single-fidelity test."""
+    x0 = input_params["x0"]
+    x1 = input_params["x1"]
+    result = -(x0 + 10 * np.cos(x0)) * (x1 + 5 * np.cos(x1))
+    output_params["f"] = result
+
+
+def test_neldermead():
+    """Test that the Nelder-Mead generator optimizes the objective."""
+
+    # Create varying parameters.
+    names = ["x0", "x1"]
+    lower_bounds = [-3.0, 2.0]
+    upper_bounds = [1.0, 5.0]
+    varying_parameters = [
+        VaryingParameter(name, lb, ub)
+        for name, lb, ub in zip(names, lower_bounds, upper_bounds)
+    ]
+
+    # Set number of evaluations.
+    n_evals = 100
+
+    # Define objective (maximized, since minimize=False).
+    obj = Objective("f", minimize=False)
+
+    # Create generator and run exploration.
+    gen = NelderMeadGenerator(
+        varying_parameters=varying_parameters, objectives=[obj]
+    )
+    ev = FunctionEvaluator(function=eval_func)
+    exploration = Exploration(
+        generator=gen,
+        evaluator=ev,
+        max_evals=n_evals,
+        sim_workers=1,
+        exploration_dir_path="./tests_output/test_neldermead",
+    )
+    exploration.run()
+
+    # Get evaluated points.
+    h = exploration.history
+    h = h[h["sim_ended"]]
+
+    # All requested evaluations should have been carried out.
+    assert len(h) == n_evals
+
+    # The optimization should improve on the initial (center) point.
+    assert h["f"].max() > h["f"].iloc[0]
+
+
+if __name__ == "__main__":
+    test_neldermead()