-
Notifications
You must be signed in to change notification settings - Fork 0
/
backprop.go
53 lines (45 loc) · 1.48 KB
/
backprop.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
package main
import "math"
// Learning-rate schedule: the effective coefficient at iteration n is
// startingCoefficient * coeffDecay^n.
const (
	startingCoefficient float64 = 1.0 // initial learning rate
	coeffDecay          float64 = 1.0 // per-iteration decay factor (1.0 = no decay)
)
// BackPropogator performs backpropagation weight updates on a NeuronNetwork.
// Each call to Propogate computes per-neuron error terms and adjusts the
// network's weights in place.
// NOTE(review): conventional spelling is "Propagator"; name kept as-is to
// preserve the exported API.
type BackPropogator struct {
	nn NeuronNetwork // network whose weights are adjusted in place
	expectedOutput float64 // target output used for the output-layer error term
	iteration int // completed Propogate calls; drives the learning-rate decay
}
// Propogate runs one backpropagation pass over the network for the given
// forward-pass output, updating every weight in place and advancing the
// iteration counter.
//
// Phase 1 computes per-neuron error terms from the output layer backwards
// (delta rule: d = (expected - out) * (1 - out) * out at the top, then
// d_i = sum_k w_ki * d_k for hidden layers). Phase 2 adjusts each weight by
// coeff * error * f'(input), where coeff decays geometrically with the
// iteration count.
//
// NOTE(review): math.Atan(input) is used as the activation-derivative
// factor — confirm this matches the forward pass's activation function.
func (bp *BackPropogator) Propogate(output float64) {
	// Output-layer delta: (t - y) * y * (1 - y).
	err := (bp.expectedOutput - output) * (1 - output) * output
	// Learning rate for this iteration (coeffDecay == 1.0 means no decay).
	coeff := startingCoefficient * math.Pow(coeffDecay, float64(bp.iteration))

	// Error terms sized to the actual network. The previous fixed
	// [3][3]float64 array panicked on networks with more than 3 layers or
	// more than 3 neurons in a layer.
	errors := make([][]float64, len(bp.nn.neuronLayers))

	// Error Calculation: d3 = w34d4 + w35d5
	for i := len(bp.nn.neuronLayers) - 1; i >= 0; i-- {
		errors[i] = make([]float64, len(bp.nn.neuronLayers[i].neurons))
		for j := range bp.nn.neuronLayers[i].neurons {
			if i == len(bp.nn.neuronLayers)-1 {
				// Top layer: every output neuron takes the output delta.
				errors[i][j] = err
			} else {
				// Hidden layer: weighted sum of the next layer's deltas.
				// (errors[i][j] starts at zero from make, so no explicit
				// reset is needed.)
				for k := range bp.nn.neuronLayers[i+1].neurons {
					errors[i][j] += bp.nn.neuronLayers[i+1].neurons[k].weights[j] * errors[i+1][k]
				}
			}
		}
	}

	// Weight Adjustment: w'12 = w12 + ( h * d * df1(e)/de * y2)
	for i := range bp.nn.neuronLayers {
		for j := range bp.nn.neuronLayers[i].neurons {
			for k := range bp.nn.neuronLayers[i].neurons[j].weights {
				// Input feeding this weight: the raw network input for the
				// first layer, otherwise the previous layer's neuron output.
				input := 0.0
				if i == 0 {
					input = bp.nn.inputs[k]
				} else {
					input = bp.nn.neuronLayers[i-1].neurons[k].output
				}
				bp.nn.neuronLayers[i].neurons[j].weights[k] += coeff * errors[i][j] * math.Atan(input)
			}
		}
	}

	bp.iteration++
}