-
Notifications
You must be signed in to change notification settings - Fork 8
/
Copy pathtest2.py
124 lines (104 loc) · 3.27 KB
/
test2.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
#coding=utf-8
# decompress ip1 layer
import caffe
from caffe import layers as L, params as P, to_proto
from caffe.proto import caffe_pb2
import lmdb
import numpy as np
import os
import sys
from numpy import linalg as la
import matplotlib.pyplot as plt
from base import *
# Paths and tuning knobs for the ip1 SVD-compression experiment.
CAFFE_HOME = "/opt/caffe/"
# Directory where the per-image prediction arrays are written at the end.
RESULT_DIR = "./result/"
# Rank kept by the SVD of the ip1 weight matrix; <= 0 disables compression
# and evaluates the original network instead.
SVD_R = 50
# Prototxt text in which ip1 is replaced by two stacked inner-product layers
# (ipZ / ipU) of rank SVD_R — presumably generated by base.GetIP1SVDProto;
# TODO confirm against base.py.
deploySVD = GetIP1SVDProto(SVD_R)
deploy = "./proto/cifar10_quick.prototxt"
caffe_model = CAFFE_HOME + "/examples/cifar10/cifar10_quick_iter_5000.caffemodel.h5"
train_db = CAFFE_HOME + "examples/cifar10/cifar10_train_lmdb"
test_db = CAFFE_HOME + "examples/cifar10/cifar10_test_lmdb"
mean_proto = CAFFE_HOME + "examples/cifar10/mean.binaryproto"
# Per-pixel mean image in numpy format, subtracted from every test image
# before the forward pass.
mean_npy = "./mean.npy"
mean_pic = np.load(mean_npy)
def read_db(db_name):
    """Read every (image, label) record from an LMDB database.

    Args:
        db_name: path to the LMDB directory holding serialized caffe
            Datum records.

    Returns:
        (X, y): X is a list of numpy arrays, one per image, in the
        layout produced by caffe.io.datum_to_array; y is a numpy array
        of integer class labels aligned with X.
    """
    X = []
    y = []
    # One Datum instance is reused for every record; ParseFromString
    # overwrites its contents each iteration.
    datum = caffe.proto.caffe_pb2.Datum()
    lmdb_env = lmdb.open(db_name)
    try:
        # The transaction doubles as a context manager, and the
        # environment is closed even if parsing raises (the original
        # version leaked the environment handle).
        with lmdb_env.begin() as lmdb_txn:
            for key, value in lmdb_txn.cursor():
                datum.ParseFromString(value)
                X.append(caffe.io.datum_to_array(datum))
                y.append(datum.label)
    finally:
        lmdb_env.close()
    return X, np.array(y)
# Load the full test set once and cache the ground-truth labels so later
# analysis scripts can read them without re-scanning the LMDB.
testX, testy = read_db(test_db)
if not os.path.exists("label.npy"):
    np.save("label.npy", testy)
# Load the reference (uncompressed) model in inference mode.
net = caffe.Net(deploy, caffe_model, caffe.TEST)
for layer_name, param in net.params.items():
    # 0 is weight, 1 is biases
    print (layer_name, param[0].data.shape)
if SVD_R > 0:
    # Same trained weights, loaded into the deploy proto where ip1 has
    # been split into two smaller inner-product layers (ipZ, ipU).
    netSVD = caffe.Net(deploySVD, caffe_model, caffe.TEST)
    print ("SVD NET:")
    for layer_name, param in netSVD.params.items():
        # 0 is weight, 1 is biases
        print (layer_name, param[0].data.shape)
# Diagnostics only: dump the parameter shapes of the ip2 layer.
print (type(net.params))
print (net.params.keys())
print ("layer ip2:")
print ("WEIGHT:")
print (net.params["ip2"][0].data.shape)
print ("BIASES:")
print (net.params["ip2"][1].data.shape)
# NOTE(review): the original line here built an unused network spec
# ("data, label = L.Data(...)") whose results were never referenced;
# that dead code has been removed.
if SVD_R > 0:
    # Low-rank factorization of the ip1 weight matrix W:
    #   W ~= U * S * VT, keeping only the top SVD_R singular values.
    # Then y = W x + b  becomes  y = U * ((S * VT) * x) + b, i.e. two
    # smaller inner-product layers: ipZ holds Z = S * VT, ipU holds U
    # plus the original ip1 bias.
    print ("SVD %d" % SVD_R)
    u, sigma, vt = la.svd(net.params["ip1"][0].data)
    print ("Sigma: ", sigma)
    if SVD_R > len(sigma):
        print ("SVD_R is too large :-(")
        sys.exit()
    U = np.matrix(u[:, :SVD_R])
    S = np.matrix(np.diag(sigma[:SVD_R]))
    VT = np.matrix(vt[:SVD_R, :])
    # Label fixed: this is the ip1 weight shape (the original printed "IP2").
    print ("IP1", net.params["ip1"][0].data.shape)
    print ("U", U.shape)
    print ("S", S.shape)
    print ("VT", VT.shape)
    Z = S * VT
    # Copy the factored weights (and the unchanged ip1 bias) into the
    # compressed network's parameter blobs in place.
    np.copyto(netSVD.params["ipZ"][0].data, Z)
    np.copyto(netSVD.params["ipU"][0].data, U)
    np.copyto(netSVD.params["ipU"][1].data, net.params["ip1"][1].data)
    nn = netSVD
else:
    print ("NORMAL")
    nn = net
# Classify every test image one at a time and record the argmax class.
n = len(testX)
pre = np.zeros(testy.shape)
print ("N = %d" % n)
for i in range(n):
    # Mean-subtract the same way training did, then run a forward pass.
    nn.blobs["data"].data[...] = testX[i] - mean_pic
    nn.forward()
    prob = nn.blobs["prob"].data
    pre[i] = prob.argmax()
    print ("%d / %d" % (i + 1, n))
right = np.sum(pre == testy)
print ("Accuracy: %f" % (right * 1.0 / n))
# Make sure the output directory exists before saving — the original
# crashed here whenever ./result/ had not been created by hand.
if not os.path.exists(RESULT_DIR):
    os.makedirs(RESULT_DIR)
if SVD_R > 0:
    np.save(RESULT_DIR + "net_ip1_SVD%d.npy" % SVD_R, pre)
else:
    np.save(RESULT_DIR + "net_ip1_normal.npy", pre)