Skip to content

Commit

Permalink
added comments to hw1
Browse files Browse the repository at this point in the history
  • Loading branch information
fridgei committed Oct 29, 2012
1 parent c9a37c3 commit 73b6b05
Show file tree
Hide file tree
Showing 2 changed files with 35 additions and 10 deletions.
25 changes: 18 additions & 7 deletions perceptron.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,28 +12,39 @@
y_test = map(lambda x: x[0], data_test)
xs_test = map(lambda x: (1, x[1], x[2]), data_test)

def p_train(xs, ys, shuffled=False, epoch=0):
    """
    Train a perceptron and return the learned decision boundary.

    xs -- list of input vectors, each of the form (1, x1, x2); the leading
          1 is the bias term
    ys -- list of classifications (+1 or -1), parallel to xs
    shuffled -- when True, reshuffle the training data at the start of
          each epoch
    epoch -- when non-zero, train on only the first `epoch` examples
          (partial-epoch training)

    Returns a function x1 -> x2 describing the learned decision boundary.
    NOTE(review): the loop runs until a full pass makes no updates, so it
    only terminates when the (possibly truncated) data is linearly separable.
    """
    # Initial weight vector of zeros, one weight per input component.
    w = list(repeat(0, len(xs[0])))
    # Dot product of two equal-length vectors.
    dot_product = lambda x, w: sum(xi * wi for xi, wi in zip(x, w))
    # Pair each input vector with its corresponding output.
    # list(...) so the data can be sliced and shuffled in place.
    data = list(zip(xs, ys))
    # If we are doing partial epochs, slice the list from zero to epoch.
    if epoch:
        data = data[:epoch]
    updated = True
    while updated:
        # If we are shuffling our epochs, do so.
        if shuffled:
            random.shuffle(data)
        updated = False
        for x, y in data:
            # A point is misclassified when y disagrees with the sign of w.x
            # (a zero dot product also counts as misclassified).
            if dot_product(x, w) * y <= 0:
                # Nudge the weight vector toward classifying (x, y) correctly.
                w = [y * xi + wi for xi, wi in zip(x, w)]
                updated = True
    # Return a function that computes the learned decision boundary:
    # solve w0 + w1*x1 + w2*x2 = 0 for x2.
    return lambda x: -(w[0] + x * w[1]) / w[2]

x = range(-4,9)

# Graph generation

x = range(-4,9)
font = {'family' : 'normal',
'weight' : 'bold',
'size' : 14}
Expand Down
20 changes: 17 additions & 3 deletions regression.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
from __future__ import division
from operator import mul
from itertools import imap, izip, repeat
from numpy import matrix
from numpy import matrix
import numpy as np

f = open('tn-train.txt')
Expand All @@ -16,7 +16,10 @@

xs = [list(repeat(1,len(x1))) ,x1, x2]


"""
Given a list of input vectors and the corresponding output learn a linear regression
and return a function which predicts y given 2 inputs
"""
def multi_regression(xs,y):
xs = matrix(xs)
xTy = np.dot(xs,y)
Expand All @@ -27,7 +30,9 @@ def multi_regression(xs,y):
print type(w[0])
return lambda x1,x2 : w[0] + x1*w[1] + x2*w[2]


"""
Given a list of x and y values return a function which predicts y for a given x
"""
def linear_regression(x, y):
avg = lambda data: sum(data) / len(data)
xy_bar = avg(map(mul, x, y))
Expand All @@ -38,6 +43,15 @@ def linear_regression(x, y):
b = y_bar - w * x_bar
return lambda x: w*x + b


"""
Tests and SSE
In our document we misunderstood the SSE, which is why our values are too low: we divided by the number of tests,
so the SSE is off by a factor of len(y)
"""


f = multi_regression(xs,y)
sse = 0
for xi,xii,output in zip(x1,x2,y):
Expand Down

0 comments on commit 73b6b05

Please sign in to comment.