import numpy as np
np.random.seed(0)
# neural network
class NN:
    def __init__(self, ind=2, w=64, w2=64, outd=1, lr=0.1):
        # layer 1 weight
        self.w1 = np.random.normal(0, 1, [ind, w])
        # layer 1 bias
        self.b1 = np.random.normal(0, 1, [w])
        # layer 2 weight
        self.w2 = np.random.normal(0, 1, [w, w2])
        # layer 2 bias
        self.b2 = np.random.normal(0, 1, [w2])
        # output layer weight
        self.wout = np.random.normal(0, 1, [w2, outd])
        # output layer bias
        self.bout = np.random.normal(0, 1, [outd])
        # learning rate
        self.lr = lr

    def forward(self, x):
        # input tensor (activations are cached for the backward pass)
        self.z1 = x
        # layer 1 output tensor
        self.z2 = sigmoid(np.dot(self.z1, self.w1) + self.b1)
        # layer 2 output tensor
        self.z3 = sigmoid(np.dot(self.z2, self.w2) + self.b2)
        # output layer tensor
        self.out = sigmoid(np.dot(self.z3, self.wout) + self.bout)
        return self.out

    def train(self, x, t):
        # backpropagation, output layer: delta for the squared-error loss
        # 0.5 * (out - t)^2 through the output sigmoid, using
        # dE/dout = (out - t) and sigmoid'(u) = out * (1 - out).
        # (the commented line is the cross-entropy objective; its delta
        # w.r.t. the pre-activation would simplify to just (out - t))
        #En = t * np.log(self.out) + (1 - t) * np.log(1 - self.out)
        En = (self.out - t) * self.out * (1 - self.out)
        # propagate the deltas through the hidden layers first, so that
        # every gradient is taken w.r.t. the pre-update weights
        grad_u2 = np.dot(En, self.wout.T) * self.z3 * (1 - self.z3)
        grad_u1 = np.dot(grad_u2, self.w2.T) * self.z2 * (1 - self.z2)
        # get gradients for weights and biases
        grad_wout = np.dot(self.z3.T, En)
        grad_bout = np.dot(np.ones([En.shape[0]]), En)
        grad_w2 = np.dot(self.z2.T, grad_u2)
        grad_b2 = np.dot(np.ones([grad_u2.shape[0]]), grad_u2)
        grad_w1 = np.dot(self.z1.T, grad_u1)
        grad_b1 = np.dot(np.ones([grad_u1.shape[0]]), grad_u1)
        # update weights and biases (plain gradient descent)
        self.wout -= self.lr * grad_wout
        self.bout -= self.lr * grad_bout
        self.w2 -= self.lr * grad_w2
        self.b2 -= self.lr * grad_b2
        self.w1 -= self.lr * grad_w1
        self.b1 -= self.lr * grad_b1
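
# shape note for the XOR batch below, with the default sizes
# (ind=2, w=64, w2=64, outd=1): x (4, 2) -> z2 (4, 64) -> z3 (4, 64) -> out (4, 1)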

# sigmoid
def sigmoid(x):
    return 1. / (1. + np.exp(-x))
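
# note: sigmoid'(x) = sigmoid(x) * (1 - sigmoid(x)), which is why the backward
# pass above multiplies each delta by z * (1 - z) on the cached activations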

# train
def train_nn(nn, train_x, train_t, iteration_N=5000):
    for i in range(iteration_N):
        # feed-forward data
        nn.forward(train_x)
        #print("ite>>", i, 'y >>', nn.forward(train_x))
        # update parameters
        nn.train(train_x, train_t)
    return nn

# test
def test_nn(nn, test_x, test_t):
    for j in range(len(test_x)):
        x = test_x[j]
        t = test_t[j]
        print("in:", x, "gt:", t, "pred:", nn.forward(x))

# train data
train_x = np.array([[0,0], [0,1], [1,0], [1,1]], dtype=np.float32)
# train label data
train_t = np.array([[0], [1], [1], [0]], dtype=np.float32)
# prepare neural network
nn = NN()
# train
nn = train_nn(nn, train_x, train_t, iteration_N=5000)
# test
test_nn(nn, train_x, train_t)
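
# minimal sanity check (an added sketch, not part of the original script):
# threshold the network outputs at 0.5 and compare against the XOR labels
preds = nn.forward(train_x)
print("binarized:", (preds > 0.5).astype(np.int32).ravel())
print("targets  :", train_t.astype(np.int32).ravel())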