# Name: Vipul Ramtekkar
# Rollno: 16D110013
# Assignment 3
# Quasi-Newton minimisation (BFGS and DFP) of the Rosenbrock function
# with a backtracking (Armijo) line search.
import numpy as np

def user_func(x_star):
    # Rosenbrock function f(x1, x2) = 100*(x2 - x1^2)^2 + (1 - x1)^2,
    # whose global minimum is f(1, 1) = 0.
    y = 100*(x_star[1] - x_star[0]**2)**2 + (1 - x_star[0])**2
    return y
def backtrackline(x_star, delta_x):
    # Backtracking (Armijo) line search: shrink the step length alpha by rho
    # until the sufficient-decrease condition
    #   f(x + alpha*d) <= f(x) + c*alpha*grad(x)^T d
    # holds.
    alpha = 5
    rho = 0.8
    c = 0.1
    while user_func(np.add(x_star, alpha*delta_x)) > user_func(x_star) + c*alpha*(np.transpose(analytical_grad(x_star)).dot(delta_x)):
        alpha = rho*alpha
    return alpha
def BFGS(N, tol, x_star):
    # BFGS quasi-Newton method: maintain an approximation inv_B to the
    # inverse Hessian and update it from the curvature pair (s, y).
    inv_B = np.identity(x_star.shape[0])
    x1 = []
    x2 = []
    iterations = []
    fx = []
    for i in range(N):
        x1.append(x_star[0])
        x2.append(x_star[1])
        iterations.append(i)
        fx.append(user_func(x_star))
        # Search direction d = -inv_B * grad; step length from the line search.
        delta_x = -inv_B.dot(analytical_grad(x_star))
        t = backtrackline(x_star, delta_x)
        x_star_1 = np.add(x_star, t*delta_x)
        y = analytical_grad(x_star_1) - analytical_grad(x_star)
        s = x_star_1 - x_star
        # BFGS inverse-Hessian update:
        # inv_B <- (I - s*y^T/(y^T s)) * inv_B * (I - y*s^T/(y^T s)) + s*s^T/(y^T s)
        a = (np.identity(x_star.shape[0]) - (1/np.dot(np.transpose(y), s))*np.dot(s, np.transpose(y)))
        b = (np.identity(x_star.shape[0]) - (1/np.dot(np.transpose(y), s))*np.dot(y, np.transpose(s)))
        inv_B = np.dot(np.dot(a, inv_B), b) + (1/np.dot(np.transpose(y), s))*(np.dot(s, np.transpose(s)))
        # Stop when the decrease in the objective falls below the tolerance.
        if user_func(x_star) - user_func(x_star_1) < tol:
            x_star_1 = x_star
            break
        x_star = x_star_1
    return x_star, x1, x2, fx, iterations
def DFP(N, tol, x_star):
    # DFP quasi-Newton method: same loop structure as BFGS but with the
    # DFP rank-two update of the inverse-Hessian approximation.
    inv_B = np.identity(x_star.shape[0])
    x1 = []
    x2 = []
    iterations = []
    fx = []
    for i in range(N):
        x1.append(x_star[0])
        x2.append(x_star[1])
        iterations.append(i)
        fx.append(user_func(x_star))
        delta_x = -inv_B.dot(analytical_grad(x_star))
        t = backtrackline(x_star, delta_x)
        x_star_1 = np.add(x_star, t*delta_x)
        y = analytical_grad(x_star_1) - analytical_grad(x_star)
        s = x_star_1 - x_star
        # DFP inverse-Hessian update:
        # inv_B <- inv_B + s*s^T/(s^T y) - (inv_B*y*y^T*inv_B)/(y^T*inv_B*y)
        numerator = np.dot(y, np.transpose(y))
        numerator = np.dot(inv_B, numerator)
        numerator = np.dot(numerator, inv_B)
        inv_B = inv_B + 1/np.dot(np.transpose(s), y)*np.dot(s, np.transpose(s)) - (1/np.dot((np.dot(np.transpose(y), inv_B)), y))*numerator
        # Stop when the decrease in the objective falls below the tolerance.
        if user_func(x_star) - user_func(x_star_1) < tol:
            x_star_1 = x_star
            break
        x_star = x_star_1
    return x_star, x1, x2, fx, iterations
def analytical_grad(x_star):
    # Analytical gradient of the Rosenbrock function:
    #   df/dx1 = -400*x1*(x2 - x1^2) - 2*(1 - x1)
    #   df/dx2 =  200*(x2 - x1^2)
    y = np.zeros([x_star.shape[0], 1])
    y[0] = 200*(x_star[1] - x_star[0]**2)*2*-1*x_star[0] - 2*(1 - x_star[0])
    y[1] = 200*(x_star[1] - x_star[0]**2)
    return y
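
# Optional sanity check (not part of the original assignment): a minimal
# central-difference approximation of the gradient, useful for verifying
# analytical_grad at a test point. The helper name check_grad_fd and the
# step size h are illustrative choices, not from the original script.
def check_grad_fd(x_star, h=1e-6):
    g = np.zeros([x_star.shape[0], 1])
    for j in range(x_star.shape[0]):
        e = np.zeros([x_star.shape[0], 1])
        e[j] = h
        g[j] = (user_func(x_star + e) - user_func(x_star - e)) / (2*h)
    return g

# Example: the two gradients should agree to roughly 1e-5 at this point.
# print(analytical_grad(np.array([[1.5], [1.5]])) - check_grad_fd(np.array([[1.5], [1.5]])))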
# Run BFGS from the starting point (1.5, 1.5) and plot the iterates.
x_star = np.array([[1.5], [1.5]])
N = 15000
tol = 1e-8
x_star, x1, x2, fx, iterations = BFGS(N, tol, x_star)
print(x_star)
import matplotlib.pyplot as plt
plt.plot(iterations, x1)
plt.plot(iterations, x2)
plt.xlabel("iterations")
plt.ylabel("x value")
plt.show()
plt.plot(iterations, fx)
plt.ylabel("function value")
plt.xlabel("iterations")
plt.show()
# Repeat with DFP from the same starting point.
x_star = np.array([[1.5], [1.5]])
N = 15000
tol = 1e-8
x_star, x1, x2, fx, iterations = DFP(N, tol, x_star)
print(x_star)
plt.plot(iterations, x1)
plt.plot(iterations, x2)
plt.xlabel("iterations")
plt.ylabel("x value")
plt.show()
plt.plot(iterations, fx)
plt.ylabel("function value")
plt.xlabel("iterations")
plt.show()
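
# Optional cross-check (not part of the original assignment, and a sketch only):
# compare against SciPy's reference BFGS implementation, assuming scipy is
# available in the environment. The minimiser should be close to (1, 1).
try:
    from scipy.optimize import minimize
    res = minimize(lambda x: 100*(x[1] - x[0]**2)**2 + (1 - x[0])**2,
                   np.array([1.5, 1.5]), method='BFGS')
    print(res.x)
except ImportError:
    pass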