From cc699952659e2b8eb422bb06c6b67178a9e0cd31 Mon Sep 17 00:00:00 2001
From: Daniel Claudino
Date: Wed, 26 Jun 2024 19:25:59 +0000
Subject: [PATCH 1/2] Implemented Scipy optimizer plugin

Signed-off-by: Daniel Claudino
---
 quantum/plugins/optimizers/CMakeLists.txt     |   3 +-
 .../plugins/optimizers/scipy/CMakeLists.txt   |  58 ++++++++
 .../plugins/optimizers/scipy/manifest.json    |   6 +
 .../optimizers/scipy/scipy_optimizer.cpp      | 162 ++++++++++++++++++
 .../optimizers/scipy/scipy_optimizer.hpp      |  26 +++
 .../optimizers/scipy/tests/CMakeLists.txt     |   4 +
 .../scipy/tests/ScipyOptimizerTester.cpp      | 103 +++++++++++
 7 files changed, 361 insertions(+), 1 deletion(-)
 create mode 100644 quantum/plugins/optimizers/scipy/CMakeLists.txt
 create mode 100644 quantum/plugins/optimizers/scipy/manifest.json
 create mode 100644 quantum/plugins/optimizers/scipy/scipy_optimizer.cpp
 create mode 100644 quantum/plugins/optimizers/scipy/scipy_optimizer.hpp
 create mode 100644 quantum/plugins/optimizers/scipy/tests/CMakeLists.txt
 create mode 100644 quantum/plugins/optimizers/scipy/tests/ScipyOptimizerTester.cpp

diff --git a/quantum/plugins/optimizers/CMakeLists.txt b/quantum/plugins/optimizers/CMakeLists.txt
index d764e5c20..2d8be5138 100644
--- a/quantum/plugins/optimizers/CMakeLists.txt
+++ b/quantum/plugins/optimizers/CMakeLists.txt
@@ -1,2 +1,3 @@
 add_subdirectory(nlopt-optimizers)
-add_subdirectory(mlpack)
\ No newline at end of file
+add_subdirectory(mlpack)
+add_subdirectory(scipy)
\ No newline at end of file
diff --git a/quantum/plugins/optimizers/scipy/CMakeLists.txt b/quantum/plugins/optimizers/scipy/CMakeLists.txt
new file mode 100644
index 000000000..cb5b7a298
--- /dev/null
+++ b/quantum/plugins/optimizers/scipy/CMakeLists.txt
@@ -0,0 +1,58 @@
+message(STATUS "${BoldGreen}Building Scipy Optimizer.${ColorReset}")
+set(LIBRARY_NAME xacc-scipy-optimizer)
+
+file(GLOB
+     SRC
+     scipy_optimizer.cpp)
+
+usfunctiongetresourcesource(TARGET
+                            ${LIBRARY_NAME}
+                            OUT
+                            SRC)
+usfunctiongeneratebundleinit(TARGET
+                             ${LIBRARY_NAME}
+                             OUT
+                             SRC)
+#find_package(pybind11 REQUIRED)
+find_package(Python COMPONENTS Interpreter Development)
+
+add_library(${LIBRARY_NAME} SHARED ${SRC})
+
+if(Python_FOUND)
+  message("FOUND python")
+endif()
+target_include_directories(${LIBRARY_NAME}
+                           PUBLIC . ${CMAKE_SOURCE_DIR}/tpls/pybind11/include ${Python_INCLUDE_DIRS})
+
+target_link_libraries(${LIBRARY_NAME} PUBLIC xacc Python::Python)
+
+set(_bundle_name xacc_optimizer_scipy)
+set_target_properties(${LIBRARY_NAME}
+                      PROPERTIES COMPILE_DEFINITIONS
+                                 US_BUNDLE_NAME=${_bundle_name}
+                                 US_BUNDLE_NAME
+                                 ${_bundle_name})
+
+usfunctionembedresources(TARGET
+                         ${LIBRARY_NAME}
+                         WORKING_DIRECTORY
+                         ${CMAKE_CURRENT_SOURCE_DIR}
+                         FILES
+                         manifest.json)
+
+if(APPLE)
+  set_target_properties(${LIBRARY_NAME}
+                        PROPERTIES INSTALL_RPATH "@loader_path/../lib")
+  set_target_properties(${LIBRARY_NAME}
+                        PROPERTIES LINK_FLAGS "-undefined dynamic_lookup")
+else()
+  set_target_properties(${LIBRARY_NAME}
+                        PROPERTIES INSTALL_RPATH "$ORIGIN/../lib")
+  set_target_properties(${LIBRARY_NAME} PROPERTIES LINK_FLAGS "-shared")
+endif()
+
+if(XACC_BUILD_TESTS)
+  add_subdirectory(tests)
+endif()
+
+install(TARGETS ${LIBRARY_NAME} DESTINATION ${CMAKE_INSTALL_PREFIX}/plugins)
diff --git a/quantum/plugins/optimizers/scipy/manifest.json b/quantum/plugins/optimizers/scipy/manifest.json
new file mode 100644
index 000000000..e91935fa7
--- /dev/null
+++ b/quantum/plugins/optimizers/scipy/manifest.json
@@ -0,0 +1,6 @@
+{
+  "bundle.symbolic_name" : "xacc_optimizer_scipy",
+  "bundle.activator" : true,
+  "bundle.name" : "XACC Scipy Optimizer",
+  "bundle.description" : ""
+}
diff --git a/quantum/plugins/optimizers/scipy/scipy_optimizer.cpp b/quantum/plugins/optimizers/scipy/scipy_optimizer.cpp
new file mode 100644
index 000000000..7c617388d
--- /dev/null
+++ b/quantum/plugins/optimizers/scipy/scipy_optimizer.cpp
@@ -0,0 +1,162 @@
+#include "scipy_optimizer.hpp"
+#include "Optimizer.hpp"
+#include "pybind11/stl.h"
+#include "pybind11/numpy.h"
+#include "xacc.hpp"
+#include "xacc_plugin.hpp"
+
+namespace py = pybind11;
+
+namespace xacc {
+
+const std::string ScipyOptimizer::get_algorithm() const {
+  std::string optimizerAlgo = "COBYLA";
+  if (options.stringExists("algorithm")) {
+    optimizerAlgo = options.getString("algorithm");
+  }
+  if (options.stringExists("scipy-optimizer")) {
+    optimizerAlgo = options.getString("scipy-optimizer");
+  }
+  return optimizerAlgo;
+}
+
+const bool ScipyOptimizer::isGradientBased() const {
+
+  std::string optimizerAlgo = "cobyla";
+  if (options.stringExists("algorithm")) {
+    optimizerAlgo = options.getString("algorithm");
+  }
+  if (options.stringExists("scipy-optimizer")) {
+    optimizerAlgo = options.getString("scipy-optimizer");
+  }
+
+  if (options.stringExists("optimizer")) {
+    optimizerAlgo = options.getString("optimizer");
+  }
+
+  // accept the same BFGS spellings that optimize() normalizes below
+  if (optimizerAlgo == "bfgs" || optimizerAlgo == "BFGS" ||
+      optimizerAlgo == "l-bfgs") {
+    return true;
+  } else {
+    return false;
+  }
+}
+
+OptResult ScipyOptimizer::optimize(OptFunction& function) {
+
+  bool maximize = false;
+  if (options.keyExists<bool>("maximize")) {
+    xacc::info("Turning on maximize!"); 
+    maximize = options.get<bool>("maximize");
+  }
+
+  std::string algo = "COBYLA";
+  if (options.stringExists("algorithm")) {
+    algo = options.getString("algorithm");
+  }
+  if (options.stringExists("scipy-optimizer")) {
+    algo = options.getString("scipy-optimizer");
+  }
+  if (options.stringExists("optimizer")) {
+    algo = options.getString("optimizer");
+  }
+
+  if (algo == "cobyla" || algo == "COBYLA") {
+    algo ="COBYLA";
+  } else if (algo == "nelder-mead" || algo == "Nelder-Mead") {
+    algo = "Nelder-Mead";
+  } else if (algo == "bfgs" || algo == "BFGS" || algo == "l-bfgs") {
+    algo = "BFGS";
+  } else {
+    xacc::XACCLogger::instance()->error("Invalid optimizer at this time: " +
+                                        algo);
+  }
+
+  double tol = 1e-6;
+  if (options.keyExists<double>("ftol")) {
+    tol = options.get<double>("ftol");
+    xacc::info("[Scipy] function tolerance set to " + std::to_string(tol));
+  }
+  if (options.keyExists<double>("scipy-ftol")) {
+    tol = options.get<double>("scipy-ftol");
+    xacc::info("[Scipy] function tolerance set to " + std::to_string(tol));
+  }
+
+  int maxeval = 1000;
+  if (options.keyExists<int>("maxeval")) {
+    maxeval = options.get<int>("maxeval");
+    xacc::info("[Scipy] max function evaluations set to " +
+               std::to_string(maxeval));
+  }
+  if (options.keyExists<int>("scipy-maxeval")) {
+    maxeval = options.get<int>("scipy-maxeval");
+    xacc::info("[Scipy] max function evaluations set to " +
+               std::to_string(maxeval));
+  }
+
+  std::vector<double> x(function.dimensions());
+  if (options.keyExists<std::vector<double>>("initial-parameters")) {
+    x = options.get_with_throw<std::vector<double>>("initial-parameters");
+  } else if (options.keyExists<std::vector<int>>("initial-parameters")) {
+    auto tmpx = options.get<std::vector<int>>("initial-parameters");
+    x = std::vector<double>(tmpx.begin(), tmpx.end());
+  }
+
+  // here the python stuff starts
+  py::list pyInitialParams;
+  for (const auto &param : x) {
+      pyInitialParams.append(param);
+  }
+
+  if (isGradientBased()) std::cout << algo << "\n";
+
+  // wrap the objective function in this lambda
+  // scipy passes a numpy array to this function, hence the py::array_t type
+  py::object pyObjFunction = py::cpp_function([&function](const py::array_t<double>& pyParams) {
+    std::vector<double> params(pyParams.size());
+    std::memcpy(params.data(), pyParams.data(), pyParams.size() * sizeof(double));
+    return function(std::move(params));
+  });
+
+  // call this for gradient-based optimization
+  py::object pyObjFunctionWithGrad = py::cpp_function([&function](const py::array_t<double>& pyParams) {
+    std::vector<double> params(pyParams.size());
+    std::memcpy(params.data(), pyParams.data(), pyParams.size() * sizeof(double));
+
+    std::vector<double> grad(params.size());
+    double result = function(params, grad);
+    py::array_t<double> pyGrad(grad.size());
+    std::memcpy(pyGrad.mutable_data(), grad.data(), grad.size() * sizeof(double));
+
+    return py::make_tuple(result, pyGrad);
+  });
+
+  py::module scipy_optimize = py::module::import("scipy.optimize");
+
+  // error handling helps here to see if it's coming from C++ or python
+  try {
+
+    py::object result = scipy_optimize.attr("minimize")(
+        isGradientBased() ? pyObjFunctionWithGrad : pyObjFunction,
+        pyInitialParams,
+        py::arg("args") = py::tuple(),
+        py::arg("method") = algo,
+        py::arg("tol") = tol,
+        // scipy takes the evaluation budget through its "options" dict
+        py::arg("options") = py::dict(py::arg("maxiter") = maxeval),
+        py::arg("jac") = (isGradientBased() ? true : false)
+    );
+
+    std::vector<double> optimizedParams = result.attr("x").cast<std::vector<double>>();
+    double optimalValue = result.attr("fun").cast<double>();
+
+    return {optimalValue, optimizedParams};
+  } catch (const py::error_already_set& e) {
+    std::cerr << "Python error: " << e.what() << std::endl;
+    throw;
+  } catch (const std::exception& e) {
+    std::cerr << "Error: " << e.what() << std::endl;
+    throw;
+  }
+}
+} // namespace xacc
+
+// Register the plugin with XACC
+REGISTER_OPTIMIZER(xacc::ScipyOptimizer)
\ No newline at end of file
diff --git a/quantum/plugins/optimizers/scipy/scipy_optimizer.hpp b/quantum/plugins/optimizers/scipy/scipy_optimizer.hpp
new file mode 100644
index 000000000..49b3e50ab
--- /dev/null
+++ b/quantum/plugins/optimizers/scipy/scipy_optimizer.hpp
@@ -0,0 +1,26 @@
+#ifndef SCIPYOPTIMIZER_HPP
+#define SCIPYOPTIMIZER_HPP
+
+#include <string>
+#include <utility>
+#include <vector>
+#include "Optimizer.hpp"
+
+namespace xacc {
+
+class ScipyOptimizer : public xacc::Optimizer {
+public:
+
+  ScipyOptimizer() = default;
+  ~ScipyOptimizer() = default;
+
+  const std::string name() const override { return "scipy"; }
+  const std::string description() const override { return ""; }
+
+  OptResult optimize(OptFunction &function) override;
+  const bool isGradientBased() const override;
+  virtual const std::string get_algorithm() const override;
+};
+
+} // namespace xacc
+#endif // SCIPYOPTIMIZER_HPP
diff --git a/quantum/plugins/optimizers/scipy/tests/CMakeLists.txt b/quantum/plugins/optimizers/scipy/tests/CMakeLists.txt
new file mode 100644
index 000000000..37a7aec7e
--- /dev/null
+++ b/quantum/plugins/optimizers/scipy/tests/CMakeLists.txt
@@ -0,0 +1,4 @@
+include_directories(${CMAKE_SOURCE_DIR}/tpls/pybind11/include ${Python_INCLUDE_DIRS})
+
+add_xacc_test(ScipyOptimizer)
+target_link_libraries(ScipyOptimizerTester xacc Python::Python)
\ No newline at end of file
diff --git a/quantum/plugins/optimizers/scipy/tests/ScipyOptimizerTester.cpp b/quantum/plugins/optimizers/scipy/tests/ScipyOptimizerTester.cpp
new file mode 100644
index 000000000..f2e3c6315
--- /dev/null
+++ b/quantum/plugins/optimizers/scipy/tests/ScipyOptimizerTester.cpp
@@ -0,0 +1,103 @@
+#include <gtest/gtest.h>
+#include "xacc.hpp"
+#include "xacc_service.hpp"
+#include <pybind11/embed.h>
+#include <dlfcn.h>
+
+using namespace xacc;
+namespace py = pybind11;
+
+TEST(ScipyOptimizerTester, checkSimple) {
+
+
+  auto optimizer =
+      xacc::getService<Optimizer>("scipy"); // NLOptimizer optimizer;
+
+  OptFunction f([](const std::vector<double> &x, std::vector<double>& g) { return x[0] * x[0] + 5; },
+                1);
+
+  EXPECT_EQ(optimizer->name(), "scipy");
+  EXPECT_EQ(1, f.dimensions());
+
+
+  optimizer->setOptions(HeterogeneousMap{std::make_pair("maxeval", 20)});
+
+  auto result = optimizer->optimize(f);
+  EXPECT_NEAR(5.0, result.first, 1.0e-6);
+  EXPECT_NEAR(result.second[0], 0.0, 1.0e-6);
+
+}
+
+TEST(ScipyOptimizerTester, checkGradient) {
+
+  auto optimizer =
+      xacc::getService<Optimizer>("scipy");
+
+  OptFunction f(
+      [](const std::vector<double> &x, std::vector<double> &grad) {
+        if (!grad.empty()) {
+          std::cout << "GRAD\n";
+          grad[0] = 2. * x[0];
+        }
+        auto xx = x[0] * x[0] + 5;
+        std::cout << xx << "\n";
+        return xx;
+      },
+      1);
+
+  EXPECT_EQ(1, f.dimensions());
+
+
+  optimizer->setOptions(
+      HeterogeneousMap{std::make_pair("maxeval", 20),std::make_pair("initial-parameters", std::vector<double>{1.0}),
+                       std::make_pair("optimizer", "bfgs")});
+
+  auto result = optimizer->optimize(f);
+
+  EXPECT_NEAR(result.first, 5.0, 1e-4);
+  EXPECT_NEAR(result.second[0], 0.0, 1e-4);
+}
+
+TEST(ScipyOptimizerTester, checkGradientRosenbrock) {
+
+  auto optimizer =
+      xacc::getService<Optimizer>("scipy");
+
+  OptFunction f(
+      [](const std::vector<double> &x, std::vector<double> &grad) {
+        if (!grad.empty()) {
+        // std::cout << "GRAD\n";
+          grad[0] = -2 * (1 - x[0]) + 400 * (std::pow(x[0], 3) - x[1] * x[0]);
+          grad[1] = 200 * (x[1] - std::pow(x[0],2));
+        }
+        auto xx = 100 * std::pow(x[1] - std::pow(x[0], 2), 2) + std::pow(1 - x[0], 2);
+        std::cout << xx << ", " << x << ", " << grad << "\n";
+
+        return xx;
+      },
+      2);
+
+  EXPECT_EQ(2, f.dimensions());
+
+  optimizer->setOptions(
+      HeterogeneousMap{std::make_pair("maxeval", 200),
+                       std::make_pair("optimizer", "bfgs")});
+
+  auto result = optimizer->optimize(f);
+
+  EXPECT_NEAR(result.first, 0.0, 1e-4);
+  EXPECT_NEAR(result.second[0], 1.0, 1e-4);
+  EXPECT_NEAR(result.second[1], 1.0, 1e-4);
+
+}
+
+int main(int argc, char **argv) {
+  dlopen("libpython3.8.so", RTLD_LAZY | RTLD_GLOBAL);
+  xacc::Initialize(argc, argv);
+  py::initialize_interpreter();
+  ::testing::InitGoogleTest(&argc, argv);
+  auto ret = RUN_ALL_TESTS();
+  py::finalize_interpreter();
+  xacc::Finalize();
+  return ret;
+}

From 8054735ad2609f785b6d5469e0bbba55a1948c88 Mon Sep 17 00:00:00 2001
From: Daniel Claudino
Date: Thu, 27 Jun 2024 10:18:20 +0000
Subject: [PATCH 2/2] Tidying up scipy optimizer

Signed-off-by: Daniel Claudino
---
 CMakeLists.txt                                |  1 +
 quantum/plugins/optimizers/CMakeLists.txt     | 22 ++++-
 .../plugins/optimizers/scipy/CMakeLists.txt   |  3 -
 .../optimizers/scipy/scipy_optimizer.cpp      | 87 ++++++++++---------
 .../optimizers/scipy/scipy_optimizer.hpp      |  1 -
 .../scipy/tests/ScipyOptimizerTester.cpp      | 39 ++++-----
 6 files changed, 84 insertions(+), 69 deletions(-)

diff --git a/CMakeLists.txt b/CMakeLists.txt
index 3b65babcc..15452f92d 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -21,6 +21,7 @@ option(XACC_BUILD_EXAMPLES "Build example programs" OFF)
 option(XACC_ENSMALLEN_INCLUDE_DIR "Path to ensmallen.hpp for mlpack optimizer" "")
 option(XACC_ARMADILLO_INCLUDE_DIR "Path to armadillo header for mlpack optimizer" "")
 option(XACC_BUILD_ANNEALING "Build annealing libraries" OFF)
+option(XACC_BUILD_SCIPY "Build Scipy optimizer plugin" OFF)
 if(XACC_BUILD_ANNEALING)
   set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DANNEALING_ENABLED")
 else()
diff --git a/quantum/plugins/optimizers/CMakeLists.txt b/quantum/plugins/optimizers/CMakeLists.txt
index 2d8be5138..bcfcf75ec 100644
--- a/quantum/plugins/optimizers/CMakeLists.txt
+++ b/quantum/plugins/optimizers/CMakeLists.txt
@@ -1,3 +1,23 @@
 add_subdirectory(nlopt-optimizers)
 add_subdirectory(mlpack)
-add_subdirectory(scipy)
\ No newline at end of file
+if(XACC_BUILD_SCIPY)
+  execute_process(COMMAND ${Python_EXECUTABLE} -c "import scipy" RESULT_VARIABLE SCIPY_EXISTS)
+  if(SCIPY_EXISTS EQUAL "1")
+    # if not, check we have pip
+    execute_process(COMMAND ${Python_EXECUTABLE} -c "import pip" RESULT_VARIABLE PIP_EXISTS)
+
+    if(PIP_EXISTS EQUAL "0")
+      # we have pip, so just install scipy
+      message(STATUS "${BoldGreen}Installing Scipy.${ColorReset}")
+      execute_process(COMMAND ${Python_EXECUTABLE} -m pip install scipy)
+    else()
+      # we don't have pip, so warn the user
+      message(STATUS "${BoldYellow}Scipy not found, but can't install via pip. Ensure you install the scipy module if you would like to use the Scipy optimizer.${ColorReset}")
+    endif()
+  else()
+    message(STATUS "${BoldGreen}Found Scipy.${ColorReset}")
+  endif()
+  add_subdirectory(scipy)
+else()
+  message(STATUS "${BoldYellow}XACC will not build the Scipy optimizer. You can turn it on with -DXACC_BUILD_SCIPY=ON${ColorReset}")
+endif()
\ No newline at end of file
diff --git a/quantum/plugins/optimizers/scipy/CMakeLists.txt b/quantum/plugins/optimizers/scipy/CMakeLists.txt
index cb5b7a298..119ecf551 100644
--- a/quantum/plugins/optimizers/scipy/CMakeLists.txt
+++ b/quantum/plugins/optimizers/scipy/CMakeLists.txt
@@ -18,9 +18,6 @@ find_package(Python COMPONENTS Interpreter Development)
 
 add_library(${LIBRARY_NAME} SHARED ${SRC})
 
-if(Python_FOUND)
-  message("FOUND python")
-endif()
 target_include_directories(${LIBRARY_NAME}
                            PUBLIC . ${CMAKE_SOURCE_DIR}/tpls/pybind11/include ${Python_INCLUDE_DIRS})
 
diff --git a/quantum/plugins/optimizers/scipy/scipy_optimizer.cpp b/quantum/plugins/optimizers/scipy/scipy_optimizer.cpp
index 7c617388d..38e4760fe 100644
--- a/quantum/plugins/optimizers/scipy/scipy_optimizer.cpp
+++ b/quantum/plugins/optimizers/scipy/scipy_optimizer.cpp
@@ -41,11 +41,11 @@ const bool ScipyOptimizer::isGradientBased() const {
   }
 }
 
-OptResult ScipyOptimizer::optimize(OptFunction& function) {
-
+OptResult ScipyOptimizer::optimize(OptFunction &function) {
+
   bool maximize = false;
   if (options.keyExists<bool>("maximize")) {
-    xacc::info("Turning on maximize!"); 
+    xacc::info("Turning on maximize!");
     maximize = options.get<bool>("maximize");
   }
@@ -60,16 +60,16 @@ OptResult ScipyOptimizer::optimize(OptFunction& function) {
     algo = options.getString("optimizer");
   }
 
-  if (algo == "cobyla" || algo == "COBYLA") {
-    algo ="COBYLA";
-  } else if (algo == "nelder-mead" || algo == "Nelder-Mead") {
-    algo = "Nelder-Mead";
-  } else if (algo == "bfgs" || algo == "BFGS" || algo == "l-bfgs") {
-    algo = "BFGS";
-  } else {
-    xacc::XACCLogger::instance()->error("Invalid optimizer at this time: " +
-                                        algo);
-  }
+  if (algo == "cobyla" || algo == "COBYLA") {
+    algo = "COBYLA";
+  } else if (algo == "nelder-mead" || algo == "Nelder-Mead") {
+    algo = "Nelder-Mead";
+  } else if (algo == "bfgs" || algo == "BFGS" || algo == "l-bfgs") {
+    algo = "BFGS";
+  } else {
+    xacc::XACCLogger::instance()->error("Invalid optimizer at this time: " +
+                                        algo);
+  }
@@ -104,31 +104,34 @@ OptResult ScipyOptimizer::optimize(OptFunction& function) {
   // here the python stuff starts
   py::list pyInitialParams;
   for (const auto &param : x) {
-      pyInitialParams.append(param);
+    pyInitialParams.append(param);
   }
 
-  if (isGradientBased()) std::cout << algo << "\n";
-
   // wrap the objective function in this lambda
   // scipy passes a numpy array to this function, hence the py::array_t type
-  py::object pyObjFunction = py::cpp_function([&function](const py::array_t<double>& pyParams) {
-    std::vector<double> params(pyParams.size());
-    std::memcpy(params.data(), pyParams.data(), pyParams.size() * sizeof(double));
-    return function(std::move(params));
-  });
+  py::object pyObjFunction =
+      py::cpp_function([&function](const py::array_t<double> &pyParams) {
+        std::vector<double> params(pyParams.size());
+        std::memcpy(params.data(), pyParams.data(),
+                    pyParams.size() * sizeof(double));
+        return function(std::move(params));
+      });
 
   // call this for gradient-based optimization
-  py::object pyObjFunctionWithGrad = py::cpp_function([&function](const py::array_t<double>& pyParams) {
-    std::vector<double> params(pyParams.size());
-    std::memcpy(params.data(), pyParams.data(), pyParams.size() * sizeof(double));
-
-    std::vector<double> grad(params.size());
-    double result = function(params, grad);
-    py::array_t<double> pyGrad(grad.size());
-    std::memcpy(pyGrad.mutable_data(), grad.data(), grad.size() * sizeof(double));
-
-    return py::make_tuple(result, pyGrad);
-  });
+  py::object pyObjFunctionWithGrad =
+      py::cpp_function([&function](const py::array_t<double> &pyParams) {
+        std::vector<double> params(pyParams.size());
+        std::memcpy(params.data(), pyParams.data(),
+                    pyParams.size() * sizeof(double));
+
+        std::vector<double> grad(params.size());
+        double result = function(params, grad);
+        py::array_t<double> pyGrad(grad.size());
+        std::memcpy(pyGrad.mutable_data(), grad.data(),
+                    grad.size() * sizeof(double));
+
+        return py::make_tuple(result, pyGrad);
+      });
 
   py::module scipy_optimize = py::module::import("scipy.optimize");
 
@@ -141,20 +144,20 @@ OptResult ScipyOptimizer::optimize(OptFunction& function) {
         py::arg("args") = py::tuple(),
         py::arg("method") = algo,
         py::arg("tol") = tol,
         // scipy takes the evaluation budget through its "options" dict
         py::arg("options") = py::dict(py::arg("maxiter") = maxeval),
-        py::arg("jac") = (isGradientBased() ? true : false)
-    );
+        py::arg("jac") = (isGradientBased() ? true : false));
 
-    std::vector<double> optimizedParams = result.attr("x").cast<std::vector<double>>();
+    std::vector<double> optimizedParams =
+        result.attr("x").cast<std::vector<double>>();
     double optimalValue = result.attr("fun").cast<double>();
 
     return {optimalValue, optimizedParams};
-  } catch (const py::error_already_set& e) {
-    std::cerr << "Python error: " << e.what() << std::endl;
-    throw;
-  } catch (const std::exception& e) {
-    std::cerr << "Error: " << e.what() << std::endl;
-    throw;
-  }
+  } catch (const py::error_already_set &e) {
+    std::cerr << "Python error: " << e.what() << std::endl;
+    throw;
+  } catch (const std::exception &e) {
+    std::cerr << "Error: " << e.what() << std::endl;
+    throw;
+  }
 }
 } // namespace xacc
 
diff --git a/quantum/plugins/optimizers/scipy/scipy_optimizer.hpp b/quantum/plugins/optimizers/scipy/scipy_optimizer.hpp
index 49b3e50ab..25235c3b6 100644
--- a/quantum/plugins/optimizers/scipy/scipy_optimizer.hpp
+++ b/quantum/plugins/optimizers/scipy/scipy_optimizer.hpp
@@ -10,7 +10,6 @@ namespace xacc {
 
 class ScipyOptimizer : public xacc::Optimizer {
 public:
-
   ScipyOptimizer() = default;
   ~ScipyOptimizer() = default;
 
diff --git a/quantum/plugins/optimizers/scipy/tests/ScipyOptimizerTester.cpp b/quantum/plugins/optimizers/scipy/tests/ScipyOptimizerTester.cpp
index f2e3c6315..8b3424303 100644
--- a/quantum/plugins/optimizers/scipy/tests/ScipyOptimizerTester.cpp
+++ b/quantum/plugins/optimizers/scipy/tests/ScipyOptimizerTester.cpp
@@ -9,29 +9,26 @@ namespace py = pybind11;
 
 TEST(ScipyOptimizerTester, checkSimple) {
 
-
   auto optimizer =
-      xacc::getService<Optimizer>("scipy"); // NLOptimizer optimizer;
+      xacc::getService<Optimizer>("scipy");
 
-  OptFunction f([](const std::vector<double> &x, std::vector<double>& g) { return x[0] * x[0] + 5; },
+  OptFunction f([](const std::vector<double> &x,
+                   std::vector<double> &g) { return x[0] * x[0] + 5; },
                 1);
 
   EXPECT_EQ(optimizer->name(), "scipy");
   EXPECT_EQ(1, f.dimensions());
 
-
   optimizer->setOptions(HeterogeneousMap{std::make_pair("maxeval", 20)});
 
   auto result = optimizer->optimize(f);
   EXPECT_NEAR(5.0, result.first, 1.0e-6);
   EXPECT_NEAR(result.second[0], 0.0, 1.0e-6);
-
 }
 
 TEST(ScipyOptimizerTester, checkGradient) {
 
-  auto optimizer =
-      xacc::getService<Optimizer>("scipy");
+  auto optimizer = xacc::getService<Optimizer>("scipy");
 
   OptFunction f(
       [](const std::vector<double> &x, std::vector<double> &grad) {
@@ -47,10 +44,10 @@ TEST(ScipyOptimizerTester, checkGradient) {
 
   EXPECT_EQ(1, f.dimensions());
 
-
-  optimizer->setOptions(
-      HeterogeneousMap{std::make_pair("maxeval", 20),std::make_pair("initial-parameters", std::vector<double>{1.0}),
-                       std::make_pair("optimizer", "bfgs")});
+  optimizer->setOptions(HeterogeneousMap{
+      std::make_pair("maxeval", 20),
+      std::make_pair("initial-parameters", std::vector<double>{1.0}),
+      std::make_pair("optimizer", "bfgs")});
 
   auto result = optimizer->optimize(f);
 
@@ -60,35 +57,33 @@ TEST(ScipyOptimizerTester, checkGradient) {
 
 TEST(ScipyOptimizerTester, checkGradientRosenbrock) {
 
-  auto optimizer =
-      xacc::getService<Optimizer>("scipy");
+  auto optimizer = xacc::getService<Optimizer>("scipy");
 
   OptFunction f(
       [](const std::vector<double> &x, std::vector<double> &grad) {
         if (!grad.empty()) {
-        // std::cout << "GRAD\n";
+          // std::cout << "GRAD\n";
           grad[0] = -2 * (1 - x[0]) + 400 * (std::pow(x[0], 3) - x[1] * x[0]);
-          grad[1] = 200 * (x[1] - std::pow(x[0],2));
+          grad[1] = 200 * (x[1] - std::pow(x[0], 2));
         }
-        auto xx = 100 * std::pow(x[1] - std::pow(x[0], 2), 2) + std::pow(1 - x[0], 2);
-        std::cout << xx << ", " << x << ", " << grad << "\n";
-
+        auto xx =
+            100 * std::pow(x[1] - std::pow(x[0], 2), 2) + std::pow(1 - x[0], 2);
+        std::cout << xx << ", " << x << ", " << grad << "\n";
+
         return xx;
       },
       2);
 
   EXPECT_EQ(2, f.dimensions());
 
-  optimizer->setOptions(
-      HeterogeneousMap{std::make_pair("maxeval", 200),
-                       std::make_pair("optimizer", "bfgs")});
+  optimizer->setOptions(HeterogeneousMap{std::make_pair("maxeval", 200),
+                                         std::make_pair("optimizer", "bfgs")});
 
   auto result = optimizer->optimize(f);
 
   EXPECT_NEAR(result.first, 0.0, 1e-4);
   EXPECT_NEAR(result.second[0], 1.0, 1e-4);
   EXPECT_NEAR(result.second[1], 1.0, 1e-4);
-
 }
 
 int main(int argc, char **argv) {
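
For reference, below is a minimal sketch of how client code might drive the new "scipy" optimizer once these patches are applied. It mirrors the tests above; the file name usage_sketch.cpp and the quadratic objective are illustrative assumptions, not part of either patch. As with the tester target, such a client must link against xacc and Python::Python, and because the plugin imports scipy.optimize at runtime, an embedded Python interpreter must be running (on some platforms libpython may also need to be pre-loaded with dlopen, as the tester's main does).

// usage_sketch.cpp (hypothetical example, not part of this patch)
#include <iostream>
#include <utility>
#include <vector>
#include <pybind11/embed.h>
#include "xacc.hpp"
#include "xacc_service.hpp"

namespace py = pybind11;

int main(int argc, char **argv) {
  xacc::Initialize(argc, argv);
  // the plugin calls into scipy.optimize, so bring up the embedded interpreter
  py::initialize_interpreter();

  // the plugin registers itself under the service name "scipy"
  auto optimizer = xacc::getService<xacc::Optimizer>("scipy");

  // minimize f(x) = (x - 2)^2, supplying the analytic gradient so the
  // gradient-based (jac=True) BFGS path is exercised
  xacc::OptFunction f(
      [](const std::vector<double> &x, std::vector<double> &grad) {
        if (!grad.empty())
          grad[0] = 2.0 * (x[0] - 2.0);
        return (x[0] - 2.0) * (x[0] - 2.0);
      },
      1);

  // same option keys the tests use: optimizer, maxeval, initial-parameters
  optimizer->setOptions(xacc::HeterogeneousMap{
      std::make_pair("optimizer", "bfgs"),
      std::make_pair("maxeval", 100),
      std::make_pair("initial-parameters", std::vector<double>{0.0})});

  // expect a minimum of ~0.0 at x ~ 2.0
  auto result = optimizer->optimize(f);
  std::cout << "min " << result.first << " at " << result.second[0] << "\n";

  py::finalize_interpreter();
  xacc::Finalize();
  return 0;
}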