Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
60 changes: 0 additions & 60 deletions src/gradients.jl
Original file line number Diff line number Diff line change
Expand Up @@ -352,63 +352,3 @@ function finite_difference_gradient!(
end
df
end

# Vector of derivatives of a scalar->vector map f : Number -> AbstractVector.
# This is effectively a vector of partial derivatives, but we still call it a
# gradient for API consistency; the result is written into `df` and returned.
#
# Arguments:
#   df        - preallocated output vector, overwritten with the derivative.
#   f         - the map; called as f(out, x) when inplace, or out = f(x) otherwise.
#   x         - scalar evaluation point.
#   cache     - GradientCache holding fx (optional precomputed f(x)), c1 and c2
#               work arrays; sizes are guaranteed by the cache constructor.
#   relstep/absstep - step-size controls forwarded to compute_epsilon.
#   dir       - step direction (+1/-1) for the forward/central perturbation.
function finite_difference_gradient!(
    df,
    f,
    x::Number,
    cache::GradientCache{T1,T2,T3,T4,fdtype,returntype,inplace};
    relstep=default_relstep(fdtype, eltype(x)),
    absstep=relstep,
    dir=true) where {T1,T2,T3,T4,fdtype,returntype,inplace}

    # NOTE: in this case epsilon is a scalar, we need two arrays for fx1 and fx2
    # c1 denotes fx1, c2 is fx2, sizes guaranteed by the cache constructor
    fx, c1, c2 = cache.fx, cache.c1, cache.c2

    # In the in-place case the work arrays are mutated by f; alias them so the
    # difference formulas below can be written once for both code paths.
    if inplace == Val(true)
        _c1, _c2 = c1, c2
    end

    if fdtype == Val(:forward)
        epsilon = compute_epsilon(Val(:forward), x, relstep, absstep, dir)
        if inplace == Val(true)
            f(c1, x + epsilon)
        else
            _c1 = f(x + epsilon)
        end
        # If the cache carries a precomputed f(x), reuse it and save one f call.
        # (idiomatic `fx !== nothing` instead of `typeof(fx) != Nothing`)
        if fx !== nothing
            @. df = (_c1 - fx) / epsilon
        else
            if inplace == Val(true)
                f(c2, x)
            else
                _c2 = f(x)
            end
            @. df = (_c1 - _c2) / epsilon
        end
    elseif fdtype == Val(:central)
        epsilon = compute_epsilon(Val(:central), x, relstep, absstep, dir)
        if inplace == Val(true)
            f(c1, x + epsilon)
            f(c2, x - epsilon)
        else
            _c1 = f(x + epsilon)
            _c2 = f(x - epsilon)
        end
        @. df = (_c1 - _c2) / (2 * epsilon)
    elseif fdtype == Val(:complex) && returntype <: Real
        # Complex-step differentiation: exact to machine precision, but only
        # valid for real-valued returns evaluated at a real point.
        epsilon_complex = eps(real(eltype(x)))
        if inplace == Val(true)
            f(c1, x + im * epsilon_complex)
        else
            _c1 = f(x + im * epsilon_complex)
        end
        @. df = imag(_c1) / epsilon_complex
    else
        fdtype_error(returntype)
    end
    df
end
49 changes: 0 additions & 49 deletions test/finitedifftests.jl
Original file line number Diff line number Diff line change
Expand Up @@ -261,55 +261,6 @@ central_cache = FiniteDiff.GradientCache(df, x, Val{:central}, eltype(df))
@test err_func(FiniteDiff.finite_difference_gradient!(df, f, x, central_cache), df_ref) < 1e-8
end

# Fixture: f maps a scalar x to the 2-vector [sin(x), cos(x)] in place.
function f(df, x)
    df[1] = sin(x)
    df[2] = cos(x)
    return df
end
z = x = 2π * rand()
fx = zeros(2)
f(fx, x)
# ff evaluates f only when no perturbed point exceeds z; otherwise it errors.
# Used below to verify that dir=-1 steps backwards from x.
ff(df, x) = all(x .<= z) ? f(df, x) : error()
df = zeros(2)
df_ref = [cos(x), -sin(x)]
forward_cache = FiniteDiff.GradientCache(df, x, Val{:forward})
central_cache = FiniteDiff.GradientCache(df, x, Val{:central})
complex_cache = FiniteDiff.GradientCache(df, x, Val{:complex})


# Derivatives of the scalar->vector map f against the analytic reference
# df_ref = [cos(x), -sin(x)]. Tolerances reflect the order of each scheme:
# forward is first-order (1e-4), central second-order (1e-8), and complex-step
# is exact to machine precision (1e-15).
@time @testset "Gradient of f:scalar->vector real-valued tests" begin
# Out-of-place forward differencing of a scalar->vector map is known broken.
@test_broken err_func(FiniteDiff.finite_difference_gradient(f, x, Val{:forward}), df_ref) < 1e-4
@test err_func(FiniteDiff.finite_difference_gradient(f, x, Val{:forward}, eltype(x), Val{true}, fx), df_ref) < 1e-4
# dir=-1 steps below x, so ff's `x .<= z` guard is satisfied and it succeeds.
@test err_func(FiniteDiff.finite_difference_gradient(ff, x, Val{:forward}, eltype(x), Val{true}, fx, dir=-1), df_ref) < 1e-4
# Default forward direction perturbs past z, so ff must throw.
@test_throws Any err_func(FiniteDiff.finite_difference_gradient(ff, x, Val{:forward}), df_ref) < 1e-4
@test err_func(FiniteDiff.finite_difference_gradient(f, x, Val{:central}, eltype(x), Val{true}, fx), df_ref) < 1e-8
@test err_func(FiniteDiff.finite_difference_gradient(f, x, Val{:complex}, eltype(x), Val{true}, fx), df_ref) < 1e-15

# Same checks through the mutating entry point with Val-specified fdtype.
@test err_func(FiniteDiff.finite_difference_gradient!(df, f, x, Val{:forward}), df_ref) < 1e-4
@test err_func(FiniteDiff.finite_difference_gradient!(df, f, x, Val{:central}), df_ref) < 1e-8
@test err_func(FiniteDiff.finite_difference_gradient!(df, f, x, Val{:complex}), df_ref) < 1e-15

# Same checks again through preconstructed GradientCaches (allocation-free path).
@test err_func(FiniteDiff.finite_difference_gradient!(df, f, x, forward_cache), df_ref) < 1e-4
@test err_func(FiniteDiff.finite_difference_gradient!(df, f, x, central_cache), df_ref) < 1e-8
@test err_func(FiniteDiff.finite_difference_gradient!(df, f, x, complex_cache), df_ref) < 1e-15
end

# Same scalar->vector map as above, now evaluated at a complex scalar input.
function f(df, x)
    df[1] = sin(x)
    df[2] = cos(x)
    return df
end
x = (2π * rand()) * (1 + im)
fx = [zero(x), zero(x)]
f(fx, x)
df = zero(fx)
df_ref = [cos(x), -sin(x)]
forward_cache = FiniteDiff.GradientCache(df, x, Val{:forward})
central_cache = FiniteDiff.GradientCache(df, x, Val{:central})

# f is the scalar->vector map evaluated at a complex scalar x, so only forward
# and central differences apply (complex-step requires a real evaluation point).
# The testset label is corrected from "f:vector->scalar" to "f:scalar->vector"
# for consistency with the real-valued testset above and with what f does.
@time @testset "Gradient of f:scalar->vector complex-valued tests" begin
@test err_func(FiniteDiff.finite_difference_gradient(f, x, Val{:forward}, eltype(x), Val{true}, fx), df_ref) < 1e-4
@test err_func(FiniteDiff.finite_difference_gradient(f, x, Val{:central}, eltype(x), Val{true}, fx), df_ref) < 3e-7

@test err_func(FiniteDiff.finite_difference_gradient!(df, f, x, Val{:forward}), df_ref) < 1e-4
@test err_func(FiniteDiff.finite_difference_gradient!(df, f, x, Val{:central}), df_ref) < 3e-7

@test err_func(FiniteDiff.finite_difference_gradient!(df, f, x, forward_cache), df_ref) < 1e-4
@test err_func(FiniteDiff.finite_difference_gradient!(df, f, x, central_cache), df_ref) < 3e-7
end

function ret_allocs(res, _f, x, cache)
allocs = @allocated FiniteDiff.finite_difference_gradient!(res, _f, x, cache)
allocs
Expand Down