# pyvision_common.py
# -*- coding: utf-8 -*-
"""
Created on Tue Sep 30 20:34:30 2014
@author: pi19404
"""
import numpy as np
import math
from scipy import optimize
from numpy.linalg import norm
import os
import sys
import time
from sklearn.metrics import confusion_matrix
import sklearn.metrics as met
import pickle

# project-local modules
import LoadDataSets
import LogisticRegression
def sigmoid_stable(x):
    """Numerically stable sigmoid for a scalar input.

    The exponential is only ever evaluated on a non-positive argument,
    so it cannot overflow.
    """
    if x >= 0:
        z = np.exp(-x)
        return 1 / (1 + z)
    else:
        # for x < 0, z = exp(x) is small; the denominator 1 + z cannot
        # be zero and the exponential cannot overflow
        z = np.exp(x)
        return z / (1 + z)
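# Illustrative check (added here, not part of the original module): a minimal
# sketch showing that sigmoid_stable() stays finite where the naive form
# 1/(1+exp(-x)) would overflow for large negative x.
def _demo_sigmoid_stable():
    for x in (0.0, 50.0, -50.0, -1000.0):
        # exp(1000) would overflow a float64; sigmoid_stable never forms it
        print(x, sigmoid_stable(x))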
def grad_sigmoid(X):
    """Derivative of the sigmoid, expressed in terms of its output."""
    s = sigmoid(X)
    return s * (1 - s)
def sigmoid(X):
    """Apply the sigmoid transformation to each element of the input vector.

    :param X: real-valued input vector (or scalar)
    :returns: real-valued output of the same shape
    """
    den = 1.0 + np.exp(-1.0 * X)
    return 1.0 / den
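# Illustrative checks (added here, not part of the original module): sigmoid()
# operates elementwise on arrays, and grad_sigmoid() should agree with a
# central finite difference of sigmoid(). Values below are arbitrary.
def _demo_sigmoid():
    X = np.array([-2.0, 0.0, 2.0])
    print(sigmoid(X))        # approximately [0.119, 0.5, 0.881]

    eps = 1e-6
    x0 = 0.3
    numeric = (sigmoid(x0 + eps) - sigmoid(x0 - eps)) / (2 * eps)
    print(numeric, grad_sigmoid(x0))  # the two values should agree closely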
def softmax(W, b, x):
    """Apply the softmax function for multi-class logistic regression.

    :param W: class weight matrix
    :param b: class bias vector
    :param x: matrix of input samples
    :returns: matrix of class probabilities (samples along the first axis)
    """
    # linear transformation: per-class scores, one column per sample
    vec = np.dot(x, W.T).T + b
    # subtract the per-sample maximum before exponentiating; without this
    # normalisation, large scores overflow exp() and produce NaNs
    o1 = vec - vec.max(axis=0)
    vec1 = np.exp(o1)
    res = vec1 / np.sum(vec1, axis=0)
    # fail loudly if the result is still numerically unstable
    if np.any(np.isnan(res)):
        raise FloatingPointError(
            "softmax has become numerically unstable: %d NaN entries"
            % np.sum(np.isnan(res)))
    return res.T
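# Illustrative check (added, not part of the original module): each row of the
# softmax output should sum to 1, even when the raw scores are large enough
# that an unnormalised exp() would overflow. The shapes below are an
# assumption: W is (n_classes, n_features), b broadcasts against the per-class
# scores, and x is (n_samples, n_features).
def _demo_softmax():
    rng = np.random.RandomState(0)
    W = rng.randn(3, 4) * 100.0   # deliberately large weights
    b = np.zeros((3, 1))
    x = rng.randn(5, 4)
    probs = softmax(W, b, x)
    print(probs.shape)            # (5, 3)
    print(probs.sum(axis=1))      # each entry should be 1.0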
""" function can be used to compute the derivative of p norm for p>1"""
def derivative_L2norm(W,p):
# den=np.apply_along_axis(np.linalg.norm ,1, W,p)
# den=pow(den,p-1);
# num=W*pow(abs(W),p-2);
# ret=(num.T/den).T;
# if np.sum(np.isnan(ret)==True)>0:
# print "XXX"
num=W;
den=np.sum(W**2,axis=1);
r=(num.T/den).T;
index=np.isnan(r)==True;
r[index]=0;
return r
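# Illustrative check (added, not part of the original module): shape handling
# and the zero-row guard of derivative_L2norm(). Values are arbitrary, and
# NumPy may emit a divide warning for the all-zero row.
def _demo_derivative_L2norm():
    W = np.array([[3.0, 4.0],
                  [0.0, 0.0]])          # second row has zero norm
    print(derivative_L2norm(W, 2))
    # expected: first row [0.12, 0.16]; second row stays [0, 0] instead of NaN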