# FNN.py -- a small feedforward neural network, trained with backpropagation
# on convolution-filtered image data.
from copy import deepcopy
from utils.liveupdate import *
from utils.storage import *
from dataparser import *
from activator import *
import numpy as np
import filters
import pickle
import signal
import time
import os
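
# The sigmoid and specialSoftmax activators come from the local activator
# module (not shown here). The calls below assume the signature
# activator(x, setAxis=False, derivation=False). A minimal sketch of
# compatible implementations -- an assumption, not the module's actual code:
#
#     def sigmoid(x, setAxis=False, derivation=False):
#         s = 1.0 / (1.0 + np.exp(-x))
#         return s * (1.0 - s) if derivation else s
#
#     def specialSoftmax(x, setAxis=False, derivation=False):
#         axis = -1 if setAxis else None
#         e = np.exp(x - np.max(x, axis=axis, keepdims=True))
#         return e / np.sum(e, axis=axis, keepdims=True)
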
class Neuron:
    def __init__(self, synapse=None, layerActivation=None, bias=None, activator=sigmoid):
        self.next = None                        # link to the following layer
        self.back = None                        # link to the preceding layer
        self.layerActivation = layerActivation  # post-activation output
        self.layerPlain = None                  # pre-activation (affine) output
        self.synapse = synapse                  # incoming weight matrix
        self.bias = bias
        self.activator = activator
        self.setAxis = False
        self.error = None
        self.number = 0
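
# The network is a doubly linked list of Neuron objects. firstNeuron is a
# placeholder that carries the raw training input as its "activation"; every
# neuron appended after it is a trainable fully connected layer.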
class NeuronNetwork:
    def __init__(self, trainingInput, trainingOutput):
        self.trainInput = trainingInput
        self.trainOutput = trainingOutput
        self.firstNeuron = Neuron(layerActivation=self.trainInput)
        self.lastNeuron = self.firstNeuron
    def createNeuron(self, layerDimensions, activator=sigmoid, setAxis=False):
        # append a fully connected layer with randomly initialised weights
        bias = np.random.randn(layerDimensions[1])
        synapse = np.random.rand(layerDimensions[0], layerDimensions[1])
        newNeuron = Neuron(synapse=synapse, bias=bias, activator=activator)
        newNeuron.number = self.lastNeuron.number + 1
        newNeuron.setAxis = setAxis
        self.connectNeurons(self.lastNeuron, newNeuron)
        self.lastNeuron = newNeuron
    def connectNeurons(self, backNeuron, nextNeuron):
        backNeuron.next = nextNeuron
        nextNeuron.back = backNeuron
        # the previous layer's output width must match the new layer's input width
        if backNeuron.synapse is not None:
            if backNeuron.synapse.shape[-1] != nextNeuron.synapse.shape[0]:
                raise Exception("Bad neuron architecture {} -> {}".format(backNeuron.number, nextNeuron.number))
        elif backNeuron.layerActivation.shape[-1] != nextNeuron.synapse.shape[0]:
            raise Exception("Bad neuron architecture {} -> {}".format(backNeuron.number, nextNeuron.number))
    def predict(self, trainingInput):
        # inference-time feedforward: apply each trainable layer's affine
        # transform followed by its own activation function
        currentNeuron = self.firstNeuron.next
        feedForward = trainingInput
        while currentNeuron is not None:
            feedForward = currentNeuron.activator(np.dot(feedForward, currentNeuron.synapse) + currentNeuron.bias)
            currentNeuron = currentNeuron.next
        return feedForward
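
    # Hypothetical usage, assuming a 784-feature input and 10 classes
    # (MNIST-style dimensions; the real ones come from DataParser):
    #
    #     net = NeuronNetwork(trainingInput, trainingOutput)
    #     net.createNeuron((784, 4), activator=sigmoid)
    #     net.createNeuron((4, 10), activator=specialSoftmax, setAxis=True)
    #     scores = net.predict(np.hstack(featureMap))  # length-10 class scores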
    def classifyImage(self, inputImage):
        # run the network once per convolution kernel and average the class
        # scores (relies on the module-level convolutionLayer defined below)
        imageConvolution = self.data.parsed(inputImage)
        classification = np.zeros((self.lastNeuron.synapse.shape[-1],))
        for index in range(convolutionLayer.kernelCount):
            # np.hstack flattens the 2-D feature map into a vector
            response = self.predict(np.hstack(imageConvolution[index, :, :]))
            classification += response
        classification = classification / convolutionLayer.kernelCount
        return np.argmax(classification)
    def testNetwork(self, testingInput, testingOutput):
        # classify every test image and report the misclassification rate
        errors = 0
        for testImage in range(testingInput.shape[0]):
            if np.argmax(testingOutput[testImage]) != self.classifyImage(testingInput[testImage]):
                errors += 1
        print("Error rate {}%".format(errors / testingInput.shape[0] * 100))
if __name__ == "__main__":
    convolutionLayer = filters.Convilution()
    # Prewitt-style vertical-edge kernel (horizontal gradient)
    convolutionLayer.addKernel(np.array([[[-1, 0, 1],
                                          [-1, 0, 1],
                                          [-1, 0, 1]]]))
    # Laplacian kernel (second derivative; responds to blobs and edges)
    convolutionLayer.addKernel(np.array([[[0, 1, 0],
                                          [1, -4, 1],
                                          [0, 1, 0]]]))
    # Prewitt-style horizontal-edge kernel (vertical gradient)
    convolutionLayer.addKernel(np.array([[[-1, -1, -1],
                                          [ 0,  0,  0],
                                          [ 1,  1,  1]]]))
    data = DataParser(convolutionLayer)
    trainingInput, trainingOutput = data.trainingInput, data.trainingOutput
    testingInput, testingOutput = data.testingInput, data.testingOutput
    errorPlot = livePlot()
    # input layer -> 4-unit sigmoid hidden layer -> softmax output layer
    neuralNetwork = NeuronNetwork(trainingInput, trainingOutput)
    neuralNetwork.createNeuron((trainingInput.shape[-1], 4), activator=sigmoid)
    neuralNetwork.createNeuron((4, trainingOutput.shape[-1]), activator=specialSoftmax, setAxis=True)
    neuralNetwork.data = data
    learningRate = 1e-5
    for count in range(100000):
        # feedforward, skipping the input placeholder layer
        currentNeuron = neuralNetwork.firstNeuron.next
        while currentNeuron is not None:
            currentNeuron.layerPlain = currentNeuron.back.layerActivation.dot(currentNeuron.synapse) + currentNeuron.bias
            currentNeuron.layerActivation = currentNeuron.activator(currentNeuron.layerPlain, currentNeuron.setAxis)
            currentNeuron = currentNeuron.next
        currentNeuron = neuralNetwork.lastNeuron
        # log the cross-entropy loss every 100 iterations
        if count % 100 == 0:
            errorPlot.add(np.sum(-neuralNetwork.trainOutput * np.log(currentNeuron.layerActivation)))
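        # For a softmax output trained with cross-entropy, the output-layer
        # gradient reduces to (activation - target), so
        #     dL/dW = h.T (a - y)    dL/db = sum over the batch of (a - y)
        # where h is the previous layer's activation. Each hidden layer gets
        # the error propagated back through the next layer's weights, scaled
        # by the sigmoid derivative. The loop below implements exactly that.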
        # backpropagation: walk from the output layer back towards the input.
        # Weight updates are staged in newSyn so that each layer's error is
        # computed against the next layer's *old* weights; a staged update is
        # committed only once it is no longer needed.
        while currentNeuron.back is not None:
            currentNeuron.newSyn = deepcopy(currentNeuron.synapse)
            if currentNeuron.next is None:
                # output layer: softmax + cross-entropy error term
                currentNeuron.error = currentNeuron.layerActivation - neuralNetwork.trainOutput
                currentNeuron.bias -= learningRate * currentNeuron.error.sum(axis=0)
                currentNeuron.newSyn -= learningRate * currentNeuron.back.layerActivation.T.dot(currentNeuron.error)
            else:
                # hidden layer: propagate the error through the old weights,
                # then scale by the activation derivative
                currentNeuron.error = currentNeuron.next.error.dot(currentNeuron.next.synapse.T)
                delta = currentNeuron.error * currentNeuron.activator(currentNeuron.layerPlain, derivation=True)
                currentNeuron.bias -= learningRate * delta.sum(axis=0)
                currentNeuron.newSyn -= learningRate * currentNeuron.back.layerActivation.T.dot(delta)
                # the layer in front no longer needs its old weights
                currentNeuron.next.synapse = currentNeuron.next.newSyn
            currentNeuron = currentNeuron.back
        # commit the staged update of the first trainable layer
        currentNeuron.next.synapse = currentNeuron.next.newSyn
    neuralNetwork.testNetwork(testingInput, testingOutput)