add LeakyReLU activation
schillic committed Aug 16, 2023
1 parent 92d302c commit 65e31ff
Showing 4 changed files with 33 additions and 2 deletions.
1 change: 1 addition & 0 deletions docs/src/lib/Architecture.md
@@ -56,6 +56,7 @@ Id
ReLU
Sigmoid
Tanh
LeakyReLU
```

The following strings can be parsed as activation functions:
21 changes: 21 additions & 0 deletions src/Architecture/ActivationFunction.jl
@@ -57,6 +57,27 @@ struct Tanh <: ActivationFunction end

(::Tanh)(x) = tanh.(x)

"""
LeakyReLU{N<:Number}
Leaky ReLU activation.
```math
fₐ(x) = x > 0 ? x : a x
```
where ``a`` is the parameter.
### Fields
- `slope` -- parameter for negative inputs
"""
struct LeakyReLU{N<:Number} <: ActivationFunction
slope::N
end

(lr::LeakyReLU)(x::Number) = x >= zero(x) ? x : lr.slope * x
(lr::LeakyReLU)(x::AbstractVector) = lr.(x)

# constant instances of each activation function
const _id = Id()
const _relu = ReLU()
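
For context, a brief sketch of how the new activation behaves when applied directly (input values chosen here purely for illustration; this snippet is not part of the commit and assumes the definitions above are in scope):

```julia
act = LeakyReLU(0.01)

act(2.0)               # 2.0: non-negative inputs pass through unchanged
act(-3.0)              # -0.03: negative inputs are scaled by the slope
act([2.5, 0.0, -3.5])  # [2.5, 0.0, -0.035]: vectors are handled element-wise via broadcasting
```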
2 changes: 1 addition & 1 deletion src/Architecture/Architecture.jl
@@ -10,7 +10,7 @@ using Requires
export AbstractNeuralNetwork, AbstractLayerOp,
       FeedforwardNetwork, DenseLayerOp,
       dim_in, dim_out, dim,
       ActivationFunction, Id, ReLU, Sigmoid, Tanh
       ActivationFunction, Id, ReLU, Sigmoid, Tanh, LeakyReLU

include("AbstractNeuralNetwork.jl")
include("AbstractLayerOp.jl")
11 changes: 10 additions & 1 deletion test/Architecture/DenseLayerOp.jl
@@ -47,11 +47,20 @@ function test_layer(L::DenseLayerOp{Tanh})
    @test L(x) ≈ [0.986, 0, -0.998] atol = 1e-3
end

function test_layer(L::DenseLayerOp{<:LeakyReLU})
    @test L(x) == [2.5, 0, -0.035]
end

function test_layer(L::DenseLayerOp)
    return error("untested activation function: ", typeof(L.activation))
end

# run test with all activations
for act in subtypes(ActivationFunction)
    test_layer(DenseLayerOp(W, b, act()))
    if act == LeakyReLU
        act_inst = LeakyReLU(0.01)
    else
        act_inst = act()
    end
    test_layer(DenseLayerOp(W, b, act_inst))
end
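
LeakyReLU is the first activation type here that carries a field, which is why the test loop above special-cases its construction. An alternative sketch (not from the repository) that keeps the loop body uniform by introducing a hypothetical `default_instance` helper, reusing `W`, `b`, and `test_layer` from this test file:

```julia
# hypothetical helper mapping each activation type to a default instance;
# the slope 0.01 mirrors the value used in the loop above
default_instance(::Type{LeakyReLU}) = LeakyReLU(0.01)
default_instance(A::Type{<:ActivationFunction}) = A()

for act in subtypes(ActivationFunction)
    test_layer(DenseLayerOp(W, b, default_instance(act)))
end
```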
