Tidying up scipy optimizer
Signed-off-by: Daniel Claudino <claudinodc@ornl.gov>
danclaudino committed Jun 27, 2024
1 parent cc69995 commit 8054735
Showing 6 changed files with 84 additions and 69 deletions.
1 change: 1 addition & 0 deletions CMakeLists.txt
@@ -21,6 +21,7 @@ option(XACC_BUILD_EXAMPLES "Build example programs" OFF)
 option(XACC_ENSMALLEN_INCLUDE_DIR "Path to ensmallen.hpp for mlpack optimizer" "")
 option(XACC_ARMADILLO_INCLUDE_DIR "Path to armadillo header for mlpack optimizer" "")
 option(XACC_BUILD_ANNEALING "Build annealing libraries" OFF)
+option(XACC_BUILD_SCIPY "Build Scipy optimizer plugin" OFF)
 if(XACC_BUILD_ANNEALING)
   set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DANNEALING_ENABLED")
 else()
22 changes: 21 additions & 1 deletion quantum/plugins/optimizers/CMakeLists.txt
@@ -1,3 +1,23 @@
 add_subdirectory(nlopt-optimizers)
 add_subdirectory(mlpack)
-add_subdirectory(scipy)
+if(XACC_BUILD_SCIPY)
+  execute_process(COMMAND ${Python_EXECUTABLE} -c "import scipy" RESULT_VARIABLE SCIPY_EXISTS)
+  if(SCIPY_EXISTS EQUAL "1")
+    # scipy import failed, so check whether we have pip
+    execute_process(COMMAND ${Python_EXECUTABLE} -c "import pip" RESULT_VARIABLE PIP_EXISTS)
+
+    if(PIP_EXISTS EQUAL "0")
+      # we have pip, so just install scipy
+      message(STATUS "${BoldGreen}Installing Scipy.${ColorReset}")
+      execute_process(COMMAND ${Python_EXECUTABLE} -m pip install scipy)
+    else()
+      # we don't have pip, so warn the user
+      message(STATUS "${BoldYellow}Scipy not found, but can't install via pip. Ensure you install the scipy module if you would like to use the Scipy optimizer.${ColorReset}")
+    endif()
+  else()
+    message(STATUS "${BoldGreen}Found Scipy.${ColorReset}")
+  endif()
+  add_subdirectory(scipy)
+else()
+  message(STATUS "${BoldYellow}XACC will not build the Scipy optimizer. You can turn it on with -DXACC_BUILD_SCIPY=ON${ColorReset}")
+endif()
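
The branch above works by exit code: running "import scipy" through ${Python_EXECUTABLE} returns 0 when the module loads and 1 when it does not, and execute_process captures that code in SCIPY_EXISTS (likewise PIP_EXISTS for pip). For comparison, here is a minimal sketch of the same availability probe done at runtime with pybind11's embedded interpreter, the mechanism the plugin itself uses to reach Python. This is an illustration, not code from this commit, and assumes pybind11 and the Python development headers are available.

#include <pybind11/embed.h>

#include <iostream>

namespace py = pybind11;

int main() {
  // Hold an embedded Python interpreter for the lifetime of this scope.
  py::scoped_interpreter guard{};
  try {
    // Equivalent to the configure-time check: python -c "import scipy"
    py::module::import("scipy");
    std::cout << "Found Scipy.\n";
  } catch (const py::error_already_set &e) {
    // The import raised (e.g. ModuleNotFoundError), so scipy is unavailable.
    std::cout << "Scipy not found: " << e.what() << "\n";
  }
  return 0;
}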
3 changes: 0 additions & 3 deletions quantum/plugins/optimizers/scipy/CMakeLists.txt
@@ -18,9 +18,6 @@ find_package(Python COMPONENTS Interpreter Development)
 
 add_library(${LIBRARY_NAME} SHARED ${SRC})
 
-if(Python_FOUND)
-  message("FOUND python")
-endif()
 target_include_directories(${LIBRARY_NAME}
   PUBLIC . ${CMAKE_SOURCE_DIR}/tpls/pybind11/include ${Python_INCLUDE_DIRS})
 
87 changes: 45 additions & 42 deletions quantum/plugins/optimizers/scipy/scipy_optimizer.cpp
@@ -41,11 +41,11 @@ const bool ScipyOptimizer::isGradientBased() const {
   }
 }
 
-OptResult ScipyOptimizer::optimize(OptFunction& function) {
+OptResult ScipyOptimizer::optimize(OptFunction &function) {
 
   bool maximize = false;
   if (options.keyExists<bool>("maximize")) {
-  xacc::info("Turning on maximize!");
+    xacc::info("Turning on maximize!");
     maximize = options.get<bool>("maximize");
   }
 
@@ -60,16 +60,16 @@ OptResult ScipyOptimizer::optimize(OptFunction& function) {
     algo = options.getString("optimizer");
   }
 
-  if (algo == "cobyla" || algo == "COBYLA") {
-    algo ="COBYLA";
-  } else if (algo == "nelder-mead" || algo == "Nelder-Mead") {
-    algo = "Nelder-Mead";
-  } else if (algo == "bfgs" || algo == "BFGS" || algo == "l-bfgs") {
-    algo = "BFGS";
-  } else {
-    xacc::XACCLogger::instance()->error("Invalid optimizer at this time: " +
-    algo);
-  }
+  if (algo == "cobyla" || algo == "COBYLA") {
+    algo = "COBYLA";
+  } else if (algo == "nelder-mead" || algo == "Nelder-Mead") {
+    algo = "Nelder-Mead";
+  } else if (algo == "bfgs" || algo == "BFGS" || algo == "l-bfgs") {
+    algo = "BFGS";
+  } else {
+    xacc::XACCLogger::instance()->error("Invalid optimizer at this time: " +
+                                        algo);
+  }
 
   double tol = 1e-6;
   if (options.keyExists<double>("ftol")) {
@@ -104,31 +104,34 @@ OptResult ScipyOptimizer::optimize(OptFunction& function) {
   // here the python stuff starts
   py::list pyInitialParams;
   for (const auto &param : x) {
-  pyInitialParams.append(param);
+    pyInitialParams.append(param);
   }
 
-  if (isGradientBased()) std::cout << algo << "\n";
-
   // wrap the objective function in this lambda
   // scipy passes a numpy array to this function, hence the py::array_t type
-  py::object pyObjFunction = py::cpp_function([&function](const py::array_t<double>& pyParams) {
-    std::vector<double> params(pyParams.size());
-    std::memcpy(params.data(), pyParams.data(), pyParams.size() * sizeof(double));
-    return function(std::move(params));
-  });
+  py::object pyObjFunction =
+      py::cpp_function([&function](const py::array_t<double> &pyParams) {
+        std::vector<double> params(pyParams.size());
+        std::memcpy(params.data(), pyParams.data(),
+                    pyParams.size() * sizeof(double));
+        return function(std::move(params));
+      });
 
   // call this for gradient-based optimization
-  py::object pyObjFunctionWithGrad = py::cpp_function([&function](const py::array_t<double>& pyParams) {
-    std::vector<double> params(pyParams.size());
-    std::memcpy(params.data(), pyParams.data(), pyParams.size() * sizeof(double));
-
-    std::vector<double> grad(params.size());
-    double result = function(params, grad);
-    py::array_t<double> pyGrad(grad.size());
-    std::memcpy(pyGrad.mutable_data(), grad.data(), grad.size() * sizeof(double));
-
-    return py::make_tuple(result, pyGrad);
-  });
+  py::object pyObjFunctionWithGrad =
+      py::cpp_function([&function](const py::array_t<double> &pyParams) {
+        std::vector<double> params(pyParams.size());
+        std::memcpy(params.data(), pyParams.data(),
+                    pyParams.size() * sizeof(double));
+
+        std::vector<double> grad(params.size());
+        double result = function(params, grad);
+        py::array_t<double> pyGrad(grad.size());
+        std::memcpy(pyGrad.mutable_data(), grad.data(),
+                    grad.size() * sizeof(double));
+
+        return py::make_tuple(result, pyGrad);
+      });
 
   py::module scipy_optimize = py::module::import("scipy.optimize");
 
@@ -141,20 +144,20 @@ OptResult ScipyOptimizer::optimize(OptFunction& function) {
         py::arg("args") = py::tuple(),
         py::arg("method") = algo,
         py::arg("tol") = tol,
-        py::arg("jac") = (isGradientBased() ? true : false)
-    );
+        py::arg("jac") = (isGradientBased() ? true : false));
 
-    std::vector<double> optimizedParams = result.attr("x").cast<std::vector<double>>();
+    std::vector<double> optimizedParams =
+        result.attr("x").cast<std::vector<double>>();
     double optimalValue = result.attr("fun").cast<double>();
 
     return {optimalValue, optimizedParams};
-  } catch (const py::error_already_set& e) {
-    std::cerr << "Python error: " << e.what() << std::endl;
-    throw;
-  } catch (const std::exception& e) {
-    std::cerr << "Error: " << e.what() << std::endl;
-    throw;
-  }
+  } catch (const py::error_already_set &e) {
+    std::cerr << "Python error: " << e.what() << std::endl;
+    throw;
+  } catch (const std::exception &e) {
+    std::cerr << "Error: " << e.what() << std::endl;
+    throw;
+  }
 }
 } // namespace xacc
 
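The two wrappers above are the heart of the plugin: py::cpp_function turns the C++ OptFunction into a Python callable that scipy.optimize.minimize can invoke, and because the minimize call passes jac for gradient-based methods, SciPy then expects that callable to return a (value, gradient) tuple, which is exactly what pyObjFunctionWithGrad builds with py::make_tuple. Below is a self-contained sketch of the same round trip with a toy objective; it is an illustration assuming pybind11's embedded interpreter and an installed scipy, not code from this commit.

#include <pybind11/embed.h>
#include <pybind11/numpy.h>
#include <pybind11/stl.h>

#include <cstring>
#include <iostream>
#include <vector>

namespace py = pybind11;

int main() {
  py::scoped_interpreter guard{};

  // Toy objective f(x) = x0^2 + 5; SciPy hands us a numpy array.
  py::object objective =
      py::cpp_function([](const py::array_t<double> &pyParams) {
        std::vector<double> params(pyParams.size());
        std::memcpy(params.data(), pyParams.data(),
                    pyParams.size() * sizeof(double));
        return params[0] * params[0] + 5.0;
      });

  py::module scipy_optimize = py::module::import("scipy.optimize");

  py::list x0;
  x0.append(1.0); // initial guess

  // COBYLA is derivative-free, so no jac argument is needed here.
  py::object result = scipy_optimize.attr("minimize")(
      objective, x0, py::arg("method") = "COBYLA", py::arg("tol") = 1e-6);

  auto xOpt = result.attr("x").cast<std::vector<double>>();
  std::cout << "minimum " << result.attr("fun").cast<double>()
            << " at x0 = " << xOpt[0] << "\n"; // roughly 5 at x0 = 0
  return 0;
}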
1 change: 0 additions & 1 deletion quantum/plugins/optimizers/scipy/scipy_optimizer.hpp
@@ -10,7 +10,6 @@ namespace xacc {
 
 class ScipyOptimizer : public xacc::Optimizer {
 public:
-
   ScipyOptimizer() = default;
   ~ScipyOptimizer() = default;
 
39 changes: 17 additions & 22 deletions quantum/plugins/optimizers/scipy/tests/ScipyOptimizerTester.cpp
@@ -9,29 +9,26 @@ namespace py = pybind11;
 
 TEST(ScipyOptimizerTester, checkSimple) {
 
-
   auto optimizer =
-      xacc::getService<Optimizer>("scipy"); // NLOptimizer optimizer;
+      xacc::getService<Optimizer>("scipy");
 
-  OptFunction f([](const std::vector<double> &x, std::vector<double>& g) { return x[0] * x[0] + 5; },
+  OptFunction f([](const std::vector<double> &x,
+                   std::vector<double> &g) { return x[0] * x[0] + 5; },
                 1);
 
   EXPECT_EQ(optimizer->name(), "scipy");
   EXPECT_EQ(1, f.dimensions());
 
-
   optimizer->setOptions(HeterogeneousMap{std::make_pair("maxeval", 20)});
 
   auto result = optimizer->optimize(f);
   EXPECT_NEAR(5.0, result.first, 1.0e-6);
   EXPECT_NEAR(result.second[0], 0.0, 1.0e-6);
-
 }
 
 TEST(ScipyOptimizerTester, checkGradient) {
 
-  auto optimizer =
-      xacc::getService<Optimizer>("scipy");
+  auto optimizer = xacc::getService<Optimizer>("scipy");
 
   OptFunction f(
       [](const std::vector<double> &x, std::vector<double> &grad) {
@@ -47,10 +44,10 @@ TEST(ScipyOptimizerTester, checkGradient) {
 
   EXPECT_EQ(1, f.dimensions());
 
-
-  optimizer->setOptions(
-      HeterogeneousMap{std::make_pair("maxeval", 20),std::make_pair("initial-parameters", std::vector<double>{1.0}),
-                       std::make_pair("optimizer", "bfgs")});
+  optimizer->setOptions(HeterogeneousMap{
+      std::make_pair("maxeval", 20),
+      std::make_pair("initial-parameters", std::vector<double>{1.0}),
+      std::make_pair("optimizer", "bfgs")});
 
   auto result = optimizer->optimize(f);
 
@@ -60,35 +57,33 @@ TEST(ScipyOptimizerTester, checkGradient) {
 
 TEST(ScipyOptimizerTester, checkGradientRosenbrock) {
 
-  auto optimizer =
-      xacc::getService<Optimizer>("scipy");
+  auto optimizer = xacc::getService<Optimizer>("scipy");
 
   OptFunction f(
       [](const std::vector<double> &x, std::vector<double> &grad) {
         if (!grad.empty()) {
-        // std::cout << "GRAD\n";
+          // std::cout << "GRAD\n";
           grad[0] = -2 * (1 - x[0]) + 400 * (std::pow(x[0], 3) - x[1] * x[0]);
-          grad[1] = 200 * (x[1] - std::pow(x[0],2));
+          grad[1] = 200 * (x[1] - std::pow(x[0], 2));
         }
-        auto xx = 100 * std::pow(x[1] - std::pow(x[0], 2), 2) + std::pow(1 - x[0], 2);
+        auto xx =
+            100 * std::pow(x[1] - std::pow(x[0], 2), 2) + std::pow(1 - x[0], 2);
         std::cout << xx << ", " << x << ", " << grad << "\n";
 
         return xx;
       },
      2);
 
   EXPECT_EQ(2, f.dimensions());
 
-  optimizer->setOptions(
-      HeterogeneousMap{std::make_pair("maxeval", 200),
-                       std::make_pair("optimizer", "bfgs")});
+  optimizer->setOptions(HeterogeneousMap{std::make_pair("maxeval", 200),
+                                         std::make_pair("optimizer", "bfgs")});
 
   auto result = optimizer->optimize(f);
 
   EXPECT_NEAR(result.first, 0.0, 1e-4);
   EXPECT_NEAR(result.second[0], 1.0, 1e-4);
   EXPECT_NEAR(result.second[1], 1.0, 1e-4);
-
 }
 
 int main(int argc, char **argv) {
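For reference, checkGradientRosenbrock minimizes the two-dimensional Rosenbrock function, and the analytic gradient coded in grad[0] and grad[1] follows directly from it:

f(x_0, x_1) = 100\,(x_1 - x_0^2)^2 + (1 - x_0)^2, \qquad
\nabla f = \begin{pmatrix} -2\,(1 - x_0) + 400\,x_0\,(x_0^2 - x_1) \\ 200\,(x_1 - x_0^2) \end{pmatrix}

Its unique minimum is f(1, 1) = 0, which is what the EXPECT_NEAR targets assert.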
