-
Notifications
You must be signed in to change notification settings - Fork 1
/
colab.py
executable file
·45 lines (31 loc) · 1.12 KB
/
colab.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
# -*- coding: utf-8 -*-
"""Untitled0.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1uVpx0WyOmNJs2kcJmRs65QLKMbnKs02G
"""
import numpy as np
import matplotlib.pyplot as plt
from keras.models import Sequential #linear stack of layers
from keras.layers import Dense
#in Dense every node of previous layer connected to every node of next layer
from keras.optimizers import Adam #gradient descent algorithm
#recommended optimizer by kernel , it control learning rate and evaluation metrics(accuracy,mse,r2score etc)
# --- Data generation: noisy samples from the line y = 5x + 13 ---
np.random.seed(0)  # fixed seed so the same points are generated every run
n_pts = 500
x = np.linspace(0, 10, n_pts)
# Ground truth is y = 5x + 13; uniform noise in [-5, 5] is added per point.
y = 5 * x + 13 + np.random.uniform(-5, 5, n_pts)

# Visualize the raw data before fitting.
plt.scatter(x, y)
plt.show()

# --- Model: a single Dense unit with linear activation == y = w*x + b ---
model = Sequential()
model.add(Dense(units=1, input_dim=1, activation='linear'))
# FIX: the `lr` keyword was deprecated in tf.keras 2.x and removed in
# Keras 3 — `learning_rate` is the supported name for this argument.
model.compile(Adam(learning_rate=0.01), loss='mse')
model.summary()

# Train for 100 epochs; hold out the last 10% of samples for validation.
h = model.fit(x, y, epochs=100, verbose=1, validation_split=0.1)

# Plot training vs. validation loss curves to check convergence/overfitting.
plt.plot(h.history['loss'], label='training')
plt.plot(h.history['val_loss'], label='testing')
plt.legend()
plt.show()

# Overlay the fitted line (model predictions, in red) on the scatter data.
plt.plot(x, model.predict(x), color='r')
plt.scatter(x, y)
plt.show()