diff --git a/Project.toml b/Project.toml index 8629effb..8d4ad86b 100644 --- a/Project.toml +++ b/Project.toml @@ -32,10 +32,7 @@ julia = "1.6" [extras] BlackBoxOptim = "a134a8b2-14d6-55f6-9291-3336d3ab0209" DelayDiffEq = "bcd4f6db-9728-5f36-b5f7-82caef46ccdb" -DiffEqBase = "2b5f629d-d688-5b77-993f-72d75c75574e" -FiniteDiff = "6a86dc24-6348-571c-b903-95158fe2bd41" ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210" -LeastSquaresOptim = "0fc2ff8b-aaa3-5acd-a817-1944a5e08891" NLopt = "76087f3c-5699-56af-9a33-bf431cd00edd" Optim = "429524aa-4258-5aef-a3af-852621145aeb" Optimization = "7f7a1694-90dd-40f0-9382-eb1efda571ba" @@ -53,4 +50,4 @@ Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f" [targets] -test = ["Test", "BlackBoxOptim", "DelayDiffEq", "LeastSquaresOptim", "NLopt", "Optim", "OrdinaryDiffEq", "ParameterizedFunctions", "Random", "StochasticDiffEq", "SteadyStateDiffEq"] +test = ["Test", "BlackBoxOptim", "DelayDiffEq", "ForwardDiff", "NLopt", "Optim", "Optimization", "OptimizationBBO", "OptimizationNLopt", "OptimizationOptimJL", "OrdinaryDiffEq", "ParameterizedFunctions", "Random", "SciMLSensitivity", "StochasticDiffEq", "SteadyStateDiffEq", "Sundials", "Zygote"] diff --git a/test/dae_tests.jl b/test/dae_tests.jl index a24a6919..03f5adbc 100644 --- a/test/dae_tests.jl +++ b/test/dae_tests.jl @@ -17,8 +17,8 @@ t = collect(range(0, stop = 10, length = 30)) randomized = VectorOfArray([(sol(t[i]) + 0.003randn(3)) for i in 1:length(t)]) data = convert(Array, randomized) -using DiffEqParamEstim, OptimizationNLopt, OptimizationOptimJL, ForwardDiff, FiniteDiff, - Zygote, Optimization, SciMLSensitivity +using DiffEqParamEstim, OptimizationNLopt, OptimizationOptimJL, ForwardDiff, Zygote, + Optimization, SciMLSensitivity cost_function = build_loss_objective(prob, DFBDF(), L2Loss(t, data), Optimization.AutoZygote(), abstol = 1e-8, reltol = 1e-8, verbose = false) @@ -30,7 +30,8 @@ cost_function = build_loss_objective(prob, DFBDF(), L2Loss(t, data), reltol = 1e-8, verbose = false) optprob = OptimizationProblem(cost_function, [0.01]; lb = [0.0], ub = [1.0]) res = solve(optprob, OptimizationOptimJL.BFGS()) -opt = Opt(:GN_ESCH, 1) +@test res.u[1]≈0.04 atol=5e-3 -res = solve(optprob, opt) -# @test minx[1]≈0.04 atol=5e-3 +# opt = Opt(:GN_ESCH, 1) +# res = solve(optprob, opt) +# @test res.u[1]≈0.04 atol=5e-3 diff --git a/test/dde_tests.jl b/test/dde_tests.jl index d529f8c9..882de56e 100644 --- a/test/dde_tests.jl +++ b/test/dde_tests.jl @@ -37,4 +37,4 @@ optprob = OptimizationProblem(cost_function, [1.0], lb = [0.0], ub = [1.0]) opt = Opt(:GN_ESCH, 1) res = solve(optprob, BFGS()) -@test minx[1]≈0.5 atol=5e-3 +@test res.u[1]≈0.5 atol=5e-3 diff --git a/test/likelihood.jl b/test/likelihood.jl index 6eda13c6..88f2279d 100644 --- a/test/likelihood.jl +++ b/test/likelihood.jl @@ -22,7 +22,7 @@ distributions = [fit_mle(Normal, aggregate_data[i, j, :]) for i in 1:2, j in 1:2 obj = build_loss_objective(prob1, Tsit5(), LogLikeLoss(t, distributions), maxiters = 10000, verbose = false) -optprob = OptimizationProblem(obj, [2.0, 2.0], lb = [0.5, 0.5], ub = [5.0, 5.0]) +optprob = Optimization.OptimizationProblem(obj, [2.0, 2.0], lb = [0.5, 0.5], ub = [5.0, 5.0]) result = solve(optprob, BBO_adaptive_de_rand_1_bin_radiuslimited(), maxiters = 11e3) @test result.original.archive_output.best_candidate≈[1.5, 1.0] atol=1e-1 @@ -34,7 +34,7 @@ obj = build_loss_objective(prob1, Tsit5(), LogLikeLoss(t, data_distributions, diff_distributions), Optimization.AutoForwardDiff(), maxiters = 10000, verbose = false) -optprob = OptimizationProblem(obj, [2.0, 2.0], lb = [0.5, 0.5], ub = [5.0, 5.0]) +optprob = Optimization.OptimizationProblem(obj, [2.0, 2.0], lb = [0.5, 0.5], ub = [5.0, 5.0]) result = solve(optprob, BBO_adaptive_de_rand_1_bin_radiuslimited(), maxiters = 11e3) @test result.original.archive_output.best_candidate≈[1.5, 1.0] atol=1e-1 @@ -46,10 +46,11 @@ obj = build_loss_objective(prob1, Tsit5(), LogLikeLoss(t, data_distributions, diff_distributions, 0.3), Optimization.AutoForwardDiff(), maxiters = 10000, verbose = false) -optprob = OptimizationProblem(obj, [2.0, 2.0], lb = [0.5, 0.5], ub = [5.0, 5.0]) +optprob = Optimization.OptimizationProblem(obj, [2.0, 2.0], lb = [0.5, 0.5], ub = [5.0, 5.0]) result = solve(optprob, BBO_adaptive_de_rand_1_bin_radiuslimited(), maxiters = 11e3) +@test result.u≈[1.5, 1.0] atol=1e-1 using OptimizationBBO.BlackBoxOptim -bboptimize((x) -> obj(x, nothing), SearchRange = bound1) +result = bboptimize(obj, SearchRange = [(0.5, 5.0), (0.5, 5.0)], MaxSteps = 11e3) @test result.archive_output.best_candidate≈[1.5, 1.0] atol=1e-1 distributions = [fit_mle(MvNormal, aggregate_data[:, j, :]) for j in 1:200] @@ -61,6 +62,6 @@ obj = build_loss_objective(prob1, Tsit5(), LogLikeLoss(t, distributions, diff_distributions), Optimization.AutoForwardDiff(), maxiters = 10000, verbose = false, priors = priors) -optprob = OptimizationProblem(obj, [2.0, 2.0], lb = [0.5, 0.5], ub = [5.0, 5.0]) +optprob = Optimization.OptimizationProblem(obj, [2.0, 2.0], lb = [0.5, 0.5], ub = [5.0, 5.0]) result = solve(optprob, BBO_adaptive_de_rand_1_bin_radiuslimited(), maxiters = 11e3) -@test result.archive_output.best_candidate≈[1.5, 1.0] atol=1e-1 +@test result.u≈[1.5, 1.0] atol=1e-1 diff --git a/test/out_of_place_odes.jl b/test/out_of_place_odes.jl index d38cc81c..f973828e 100644 --- a/test/out_of_place_odes.jl +++ b/test/out_of_place_odes.jl @@ -1,4 +1,4 @@ -using OrdinaryDiffEq, Test, SciMLSensitivity +using OrdinaryDiffEq, Test, SciMLSensitivity, Optimization, OptimizationOptimJL function LotkaVolterraTest_not_inplace(u, a, t) b, c, d = 1.0, 3.0, 1.0 @@ -26,7 +26,7 @@ soll = solve(prob, Tsit5()) cost_function = build_loss_objective(prob, Tsit5(), L2Loss(t, data), Optimization.AutoZygote(), maxiters = 10000, verbose = false) -optprob = OptimizationProblem(cost_function, [1.0], lb = [0.0], ub = [10.0]) +optprob = Optimization.OptimizationProblem(cost_function, [1.0], lb = [0.0], ub = [10.0]) sol = solve(optprob, BFGS()) # two-stage OOP regression test diff --git a/test/test_on_monte.jl b/test/test_on_monte.jl index d25a3365..5d990109 100644 --- a/test/test_on_monte.jl +++ b/test/test_on_monte.jl @@ -1,5 +1,5 @@ using DiffEqParamEstim, OrdinaryDiffEq, StochasticDiffEq, ParameterizedFunctions, - DiffEqBase, RecursiveArrayTools + DiffEqBase, RecursiveArrayTools, OptimizationOptimJL, Zygote using Test pf_func = function (du, u, p, t) @@ -18,12 +18,11 @@ randomized = VectorOfArray([(sol(t[i]) + 0.01randn(2)) for i in 1:length(t)]) data = convert(Array, randomized) monte_prob = EnsembleProblem(prob) -obj = build_loss_objective(monte_prob, Tsit5(), L2Loss(t, data), maxiters = 10000, - Optimization.AutoZygote(), +obj = build_loss_objective(monte_prob, Tsit5(), L2Loss(t, data), Optimization.AutoForwardDiff(), maxiters = 10000, abstol = 1e-8, reltol = 1e-8, verbose = false, trajectories = 25) optprob = OptimizationProblem(obj, [1.3, 0.8]) -result = solve(obj, Optim.BFGS()) +result = solve(optprob, Optim.BFGS()) @test result.u≈[1.5, 1.0] atol=3e-1 pg_func = function (du, u, p, t) diff --git a/test/tests_on_odes/nlopt_test.jl b/test/tests_on_odes/nlopt_test.jl index f560813c..9bfaf7ab 100644 --- a/test/tests_on_odes/nlopt_test.jl +++ b/test/tests_on_odes/nlopt_test.jl @@ -1,4 +1,4 @@ -using OptimizationNLopt +using OptimizationNLopt, Zygote println("Use NLOpt to fit the parameter") @@ -28,15 +28,11 @@ res = solve(optprob, opt) # test differentiation -for adtype in (Optimization.AutoZygote(), SciMLBase.NoAD()) - global obj = build_loss_objective(prob1, Tsit5(), L2Loss(t, data), adtype; - maxiters = 10000) - - global opt = Opt(:LD_MMA, 1) - xtol_rel!(opt, 1e-3) - maxeval!(opt, 10000) - global minf, minx, ret - optprob = OptimizationNLopt.OptimizationProblem(obj, [1.4]) - res = solve(optprob, opt) - @test res.u[1]≈1.5 atol=1e-1 -end +obj = build_loss_objective(prob1, Tsit5(), L2Loss(t, data), Optimization.AutoForwardDiff(); + maxiters = 10000) #zygote behaves weirdly here +opt = Opt(:LD_MMA, 1) +xtol_rel!(opt, 1e-3) +maxeval!(opt, 10000) +optprob = OptimizationNLopt.OptimizationProblem(obj, [1.3]) +res = solve(optprob, opt) +@test res.u[1]≈1.5 atol=1e-1 diff --git a/test/tests_on_odes/regularization_test.jl b/test/tests_on_odes/regularization_test.jl index 30df3793..0ae12680 100644 --- a/test/tests_on_odes/regularization_test.jl +++ b/test/tests_on_odes/regularization_test.jl @@ -1,4 +1,4 @@ -using PenaltyFunctions, OptimizationOptimJL, LinearAlgebra +using PenaltyFunctions, OptimizationOptimJL, LinearAlgebra, SciMLSensitivity cost_function_1 = build_loss_objective(prob1, Tsit5(), L2Loss(t, data), Optimization.AutoZygote(), @@ -20,14 +20,14 @@ cost_function_3 = build_loss_objective(prob3, Tsit5(), L2Loss(t, data), maxiters = 10000) println("Use Optim BFGS to fit the parameter") -optprob = OptimizationNLopt.OptimizationProblem(cost_function_1, [1.0]) +optprob = Optimization.OptimizationProblem(cost_function_1, [1.0]) result = solve(optprob, Optim.BFGS()) @test result.u[1]≈1.5 atol=3e-1 -optprob = OptimizationProblem(cost_function_2, [1.2, 2.7]) +optprob = Optimization.OptimizationProblem(cost_function_2, [1.2, 2.7]) result = solve(optprob, Optim.BFGS()) @test result.minimizer≈[1.5; 3.0] atol=3e-1 -optprob = OptimizationProblem(cost_function_3, [1.3, 0.8, 2.8, 1.2]) +optprob = Optimization.OptimizationProblem(cost_function_3, [1.3, 0.8, 2.8, 1.2]) result = solve(optprob, Optim.BFGS()) @test result.minimizer≈[1.5; 1.0; 3.0; 1.0] atol=5e-1 diff --git a/test/tests_on_odes/test_problems.jl b/test/tests_on_odes/test_problems.jl index 6dd407d0..530b5fa6 100644 --- a/test/tests_on_odes/test_problems.jl +++ b/test/tests_on_odes/test_problems.jl @@ -1,4 +1,4 @@ -using OrdinaryDiffEq, ParameterizedFunctions, DiffEqBase, RecursiveArrayTools +using OrdinaryDiffEq, ParameterizedFunctions, RecursiveArrayTools # Here are the problems to solve diff --git a/test/tests_on_odes/two_stage_method_test.jl b/test/tests_on_odes/two_stage_method_test.jl index 3635728f..4ec2d82f 100644 --- a/test/tests_on_odes/two_stage_method_test.jl +++ b/test/tests_on_odes/two_stage_method_test.jl @@ -1,4 +1,4 @@ -using Optim, NLopt +using Optim, NLopt, OptimizationOptimJL println("Use Two Stage Method to fit the parameter")