gradientDescent.jl
function gradientDescent(X, y, theta, alpha, num_iters)
#GRADIENTDESCENT Performs gradient descent to learn theta
#   theta = GRADIENTDESCENT(X, y, theta, alpha, num_iters) updates theta by
#   taking num_iters gradient steps with learning rate alpha

    # Initialize some useful values
    m = length(y);                   # number of training examples
    J_history = zeros(num_iters, 1); # cost recorded at every iteration

    for iter = 1:num_iters
        # ====================== YOUR CODE HERE ======================
        # Instructions: Perform a single gradient step on the parameter vector
        #               theta.
        #
        # Hint: While debugging, it can be useful to print out the values
        #       of the cost function (computeCost) and gradient here.
        #
        #println(size(X))
        #println(size(theta))
        #println(size(y))
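
        # Vectorized batch gradient step:
        #   theta := theta - (alpha/m) * X' * (X*theta - y)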
        theta = theta - alpha*(X'*(X*theta-y))/m
        # ============================================================

        # Save the cost J in every iteration
        J_history[iter] = computeCost(X, y, theta);
    end

    return (theta, J_history)
end
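
# -----------------------------------------------------------------------------
# Minimal usage sketch (not part of the original exercise file). It assumes a
# computeCost(X, y, theta) helper is defined elsewhere in the exercise; a
# standard least-squares cost is included here only so the sketch runs on its
# own. The data, learning rate, and iteration count are illustrative choices.
# -----------------------------------------------------------------------------
computeCost(X, y, theta) = sum((X*theta - y).^2) / (2*length(y))

X = [ones(5) collect(1.0:5.0)]   # design matrix: bias column plus one feature
y = [2.0, 4.0, 6.0, 8.0, 10.0]   # targets generated by y = 2x
theta, J_history = gradientDescent(X, y, zeros(2), 0.05, 2000)
println(theta)                   # should approach [0.0, 2.0]
println(J_history[end])          # final cost should be close to zero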