Skip to content

Commit

Permalink
Add accessor functions
Browse files Browse the repository at this point in the history
  • Loading branch information
tkf committed Sep 9, 2019
1 parent d6a6d75 commit 81b506e
Show file tree
Hide file tree
Showing 4 changed files with 77 additions and 3 deletions.
19 changes: 19 additions & 0 deletions docs/src/user/minimization.md
Original file line number Diff line number Diff line change
Expand Up @@ -219,3 +219,22 @@ line search errors if `initial_x` is a stationary point. Notice that this is only
a first order check. If `initial_x` is any type of stationary point, `g_converged`
will be true. This includes local minima, saddle points, and local maxima. If `iterations` is `0`
and `g_converged` is `true`, the user needs to keep this point in mind.

## Iterator interface
For multivariable optimization, an iterator interface is provided through the
`Optim.optimizing` function. Using this interface, `optimize(args...; kwargs...)` is equivalent to

```jl
let istate
for istate′ in Optim.optimizing(args...; kwargs...)
istate = istate′
end
Optim.OptimizationResults(istate)
end
```

The iterator returned by `Optim.optimizing` yields an iterator state for each iteration
step.

Functions that can be called on the result object (e.g. `minimizer`, `iterations`; see
[Complete list of functions](@ref)) can be used on the iteration state `istate`.
31 changes: 31 additions & 0 deletions src/api.jl
Original file line number Diff line number Diff line change
Expand Up @@ -118,3 +118,34 @@ rel_tol(r::OptimizationResults) = error("rel_tol is not implemented for $(summar
# Relative tolerance used by the univariate optimizer that produced `r`.
function rel_tol(r::UnivariateOptimizationResults)
    return r.rel_tol
end

# Generic fallback: absolute tolerance is only defined for univariate results.
function abs_tol(r::OptimizationResults)
    return error("abs_tol is not implemented for $(summary(r)).")
end

# Absolute tolerance used by the univariate optimizer that produced `r`.
function abs_tol(r::UnivariateOptimizationResults)
    return r.abs_tol
end


# Derive `IteratorState` accessors from `MultivariateOptimizationResults`
# accessors.

# Result accessors that do _not_ need `after_while!` to have run; they can
# read the in-progress results directly via `_OptimizationResults`.
for accessor in (
    :(Base.summary),
    :iterations,
    :iteration_limit_reached,
    :trace,
    :x_trace,
    :f_trace,
    :f_calls,
    :converged,
    :g_norm_trace,
    :g_calls,
    :x_converged,
    :f_converged,
    :g_converged,
    :initial_state,
)
    # Forward `accessor(istate)` to the same accessor on the (non-finalized)
    # results object built from the iterator state.
    @eval $accessor(istate::IteratorState) = $accessor(_OptimizationResults(istate))
end

# Result accessors that need `after_while!` to have run first; these go
# through `OptimizationResults`, which finalizes the iterator state.
for accessor in (:minimizer, :minimum)
    @eval $accessor(istate::IteratorState) = $accessor(OptimizationResults(istate))
end
8 changes: 6 additions & 2 deletions src/multivariate/optimize/optimize.jl
Original file line number Diff line number Diff line change
Expand Up @@ -152,11 +152,15 @@ function Base.iterate(iter::OptimIterator, istate = nothing)
end

# Build the final results object for an iterator state: run `after_while!`
# to finalize the optimizer state, then construct the results.
function OptimizationResults(istate::IteratorState)
    iter = istate.iter
    after_while!(iter.d, iter.state, iter.method, iter.options)
    return _OptimizationResults(istate)
end

function _OptimizationResults(istate::IteratorState)
@unpack_IteratorState istate
@unpack d, initial_x, method, options, state = iter

after_while!(d, state, method, options)

# we can just check minimum, as we've earlier enforced same types/eltypes
# in variables besides the option settings
Tf = typeof(value(d))
Expand Down
22 changes: 21 additions & 1 deletion test/general/api.jl
Original file line number Diff line number Diff line change
Expand Up @@ -146,9 +146,29 @@
@test haskey(Optim.trace(res_extended_nm)[1].metadata,"step_type")

local istate
for istate′ in Optim.optimizing(f, initial_x, BFGS())
# Take a single state from the optimization iterator; tracing options are
# enabled so the trace accessors exercised below have data to return.
for istate′ in Optim.optimizing(f, initial_x, BFGS(),
Optim.Options(extended_trace = true,
store_trace = true))
istate = istate′
break  # one state is enough for the accessor smoke tests below
end
# (smoke) tests for accessor functions:
@test summary(istate) == "BFGS"
@test Optim.minimizer(istate) isa Vector{Float64}
@test Optim.minimum(istate) isa Float64
# Loop broke after the first yielded state, so no full iteration has
# completed yet — presumably why `iterations` is 0 here; confirm upstream.
@test Optim.iterations(istate) == 0
@test Optim.iteration_limit_reached(istate) == false
@test Optim.trace(istate) isa Vector{<:Optim.OptimizationState}
@test Optim.x_trace(istate) isa Vector{Vector{Float64}}
@test Optim.f_trace(istate) isa Vector{Float64}
@test Optim.f_calls(istate) == 1
@test Optim.converged(istate) == false
@test Optim.g_norm_trace(istate) isa Vector{Float64}
@test Optim.g_calls(istate) == 1
@test Optim.x_converged(istate) == false
@test Optim.f_converged(istate) == false
@test Optim.g_converged(istate) == false
@test Optim.initial_state(istate) == initial_x
# `OptimizationResults` finalizes the state and returns the full results type.
@test Optim.OptimizationResults(istate) isa Optim.MultivariateOptimizationResults
end

Expand Down

0 comments on commit 81b506e

Please sign in to comment.