From 053992fb6ec1e3914ff3eb0b0fe1ca547c663166 Mon Sep 17 00:00:00 2001
From: ayush1999
Date: Mon, 9 Jul 2018 17:05:34 +0530
Subject: [PATCH 1/3] Initial LRNorm commit

---
 src/Flux.jl             |  2 +-
 src/layers/normalise.jl | 48 +++++++++++++++++++++++++++++++++++++++++
 2 files changed, 49 insertions(+), 1 deletion(-)

diff --git a/src/Flux.jl b/src/Flux.jl
index 7d1d66e64d..6e3930abbe 100644
--- a/src/Flux.jl
+++ b/src/Flux.jl
@@ -8,7 +8,7 @@ using Juno, Requires, Reexport
 using MacroTools: @forward
 
 export Chain, Dense, RNN, LSTM, GRU, Conv,
-  Dropout, LayerNorm, BatchNorm,
+  Dropout, LayerNorm, BatchNorm, LRNorm,
   params, mapleaves, cpu, gpu
 
 @reexport using NNlib
diff --git a/src/layers/normalise.jl b/src/layers/normalise.jl
index 54f5eb5634..8bfd384f6c 100644
--- a/src/layers/normalise.jl
+++ b/src/layers/normalise.jl
@@ -157,3 +157,51 @@ function Base.show(io::IO, l::BatchNorm)
   (l.λ == identity) || print(io, ", λ = $(l.λ)")
   print(io, ")")
 end
+
+"""
+    LRNorm(k, n::Integer, α, β)
+
+Local Response Normalization layer. The `n` input should be the number of
+adjacent kernel maps to sum over.
+
+See [ImageNet Classification with Deep Convolutional
+Neural Networks](https://papers.nips.cc/paper/4824-imagenet-classification-with-deep-convolutional-neural-networks.pdf).
+
+Example:
+
+```julia
+m = LRNorm(0.2, 5, 0.5, 2)
+m(ip) # ip is a 4-D (W×H×C×N) input array
+```
+"""
+mutable struct LRNorm{F,I}
+  k
+  n::I
+  α::F
+  β
+end
+
+function (LRN::LRNorm)(x)
+  w,h,C,N = size(x)
+  temp = zeros(size(x))
+  for z_=1:N
+    for x_=1:w
+      for y_=1:h
+        for i=1:C
+          constant = LRN.k
+          for j=max(1,i-div(LRN.n,2)):min(C, i+div(LRN.n,2))
+            constant += LRN.α * (x[x_,y_,j,z_]^2)
+          end
+          constant = constant^LRN.β
+          temp[x_,y_,i,z_] = x[x_,y_,i,z_] / constant
+        end
+      end
+    end
+  end
+  return temp
+end
+
+children(LRN::LRNorm) =
+  (LRN.k, LRN.n, LRN.α, LRN.β)
+mapchildren(f, LRN::LRNorm) =
+  LRNorm(f(LRN.k), LRN.n, f(LRN.α), f(LRN.β))

From 91e9e579749e097809de2d35def15d8586cfec4a Mon Sep 17 00:00:00 2001
From: ayush1999
Date: Tue, 17 Jul 2018 22:08:20 +0530
Subject: [PATCH 2/3] few more changes

---
 src/layers/normalise.jl | 25 ++++++++-----------------
 1 file changed, 8 insertions(+), 17 deletions(-)

diff --git a/src/layers/normalise.jl b/src/layers/normalise.jl
index 8bfd384f6c..89e77f0491 100644
--- a/src/layers/normalise.jl
+++ b/src/layers/normalise.jl
@@ -183,25 +183,16 @@ end
 
 function (LRN::LRNorm)(x)
   w,h,C,N = size(x)
   temp = zeros(size(x))
-  for z_=1:N
-    for x_=1:w
-      for y_=1:h
-        for i=1:C
-          constant = LRN.k
-          for j=max(1,i-div(LRN.n,2)):min(C, i+div(LRN.n,2))
-            constant += LRN.α * (x[x_,y_,j,z_]^2)
-          end
-          constant = constant^LRN.β
-          temp[x_,y_,i,z_] = x[x_,y_,i,z_] / constant
-        end
-      end
+  for z_=1:N, x_=1:w, y_=1:h, i=1:C
+    constant = LRN.k
+    for j=max(1,i-div(LRN.n,2)):min(C, i+div(LRN.n,2))
+      constant += LRN.α * (x[x_,y_,j,z_]^2)
     end
+    constant = constant^LRN.β
+    temp[x_,y_,i,z_] = x[x_,y_,i,z_] / constant
   end
   return temp
 end
-
-children(LRN::LRNorm) =
-  (LRN.k, LRN.n, LRN.α, LRN.β)
-mapchildren(f, LRN::LRNorm) =
-  LRNorm(f(LRN.k), LRN.n, f(LRN.α), f(LRN.β))
+
+treelike(LRNorm)
\ No newline at end of file
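Both revisions of the forward pass above implement the operation described in the linked AlexNet paper: every activation is divided by `(k + α·Σⱼ aⱼ²)^β`, where the sum runs over a window of `n` adjacent channels at the same spatial position. For reference, here is a minimal standalone sketch of that computation, useful as a ground truth when comparing the loop-based versions; `lrn_reference` is a hypothetical helper name and is not part of the patch:

```julia
# Reference local response normalisation for a W×H×C×N array `a`
# (Krizhevsky et al. 2012, Sec. 3.3):
#   b[x, y, c, t] = a[x, y, c, t] / (k + α * Σ_j a[x, y, j, t]^2)^β
# where j runs over a window of `n` channels centred on channel c.
function lrn_reference(a, k, n, α, β)
    w, h, C, N = size(a)
    b = similar(a, float(eltype(a)))
    for t in 1:N, c in 1:C, y in 1:h, x in 1:w
        lo, hi = max(1, c - n ÷ 2), min(C, c + n ÷ 2)   # clamp the channel window
        s = sum(abs2, view(a, x, y, lo:hi, t))          # Σ_j a[x, y, j, t]^2
        b[x, y, c, t] = a[x, y, c, t] / (k + α * s)^β
    end
    return b
end
```

The two patch revisions differ only in how the loops are expressed; the per-position quantity they compute is the same as in this sketch.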
From 9b56439e3f5191094d2120af90140e0cc4b8c557 Mon Sep 17 00:00:00 2001
From: ayush1999
Date: Fri, 27 Jul 2018 00:07:26 +0530
Subject: [PATCH 3/3] changes -2

---
 src/layers/normalise.jl      | 36 +++++++++++++++++++++++-------------
 test/layers/normalisation.jl |  5 +++++
 2 files changed, 28 insertions(+), 13 deletions(-)

diff --git a/src/layers/normalise.jl b/src/layers/normalise.jl
index 89e77f0491..0072636de6 100644
--- a/src/layers/normalise.jl
+++ b/src/layers/normalise.jl
@@ -181,18 +181,28 @@ mutable struct LRNorm{F,I}
 end
 
 function (LRN::LRNorm)(x)
-  w,h,C,N = size(x)
-  temp = zeros(size(x))
-  for z_=1:N, x_=1:w, y_=1:h, i=1:C
-    constant = LRN.k
-    for j=max(1,i-div(LRN.n,2)):min(C, i+div(LRN.n,2))
-      constant += LRN.α * (x[x_,y_,j,z_]^2)
-    end
-    constant = constant^LRN.β
-    temp[x_,y_,i,z_] = x[x_,y_,i,z_] / constant
-  end
-  return temp
-end
+  l1 = length(x)/size(x, 3) |> Int
+  res = []
+  [push!(res, compute_inner(getindex(x, i, j, :, k), LRN.k, LRN.n, LRN.α, LRN.β))
+    for i=1:size(x, 1), j=1:size(x, 2), k=1:size(x, 4)]
+  ans = []
+  [push!(ans, res[i][j]) for i=1:l1, j=1:size(x, 3)]
+  return reshape(ans, size(x))
+end
+
+function compute_inner(x, k, n, alpha, beta) # x is the vector along the channel depth
+  start_ = [max(1, i-div(n,2)) for i=1:length(x)]
+  end_ = [min(length(x), i+div(n,2)) for i=1:length(x)]
+  c = [sum((getindex(x, getindex(start_, i):getindex(end_, i))).^2) for i=1:length(x)]
+  constant = c.*alpha .+ k
+  constant = constant .^ beta
+  return x./constant
+end
+
+function convrt_to_4d(ip)
+
+end
+
 treelike(LRNorm)
\ No newline at end of file
diff --git a/test/layers/normalisation.jl b/test/layers/normalisation.jl
index 0fdb10218f..330125ee38 100644
--- a/test/layers/normalisation.jl
+++ b/test/layers/normalisation.jl
@@ -96,3 +96,8 @@ end
     @test m(x) == y
   end
 end
+
+@testset "LRNorm" begin
+  x = rand(2,3,4,5)
+  @test size(x) == LRNorm(0.2, 5, 0.5, 2)(x) |> size
+end
\ No newline at end of file
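With the three patches applied, the layer can be exercised from the REPL along the same lines as the test added above. This is an illustrative snippet only; the hyper-parameters `k = 2`, `n = 5`, `α = 1e-4`, `β = 0.75` are the values reported in the AlexNet paper, not values fixed by this PR:

```julia
using Flux   # assumes the LRNorm patches above are applied

m = LRNorm(2, 5, 1e-4, 0.75)      # k, n, α, β
x = rand(Float32, 8, 8, 4, 2)     # a small W×H×C×N activation batch

y = m(x)
@assert size(y) == size(x)        # LRN only rescales activations; the shape is unchanged
```

In the AlexNet architecture this normalisation sits after the ReLU of the first two convolutional layers, ahead of the max-pooling stages.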