-- preprocess.lua
print('Pre-processing VOC dataset and VGG models...')

require 'cudnn'
require 'loadcaffe'
require 'image'

voc_tools = dofile('pascal_voc.lua')
dofile('opts.lua')
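-- opts.lua is expected to populate a global `opts` table; this script relies on
-- opts.PATHS.BASE_MODEL_RAW.{PROTOTXT, CAFFEMODEL}, opts.PATHS.BASE_MODEL_CACHED,
-- opts.PATHS.VOC_DEVKIT_VOCYEAR, opts.PATHS.PROPOSALS and opts.PATHS.DATASET_CACHED.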
-- Convert the Caffe VGG-F base model to a Torch/cudnn model and cache it to disk.
function VGGF()
	local model_converted = loadcaffe.load(
		opts.PATHS.BASE_MODEL_RAW.PROTOTXT,
		opts.PATHS.BASE_MODEL_RAW.CAFFEMODEL,
		'cudnn'
	):float()
	torch.save(opts.PATHS.BASE_MODEL_CACHED, model_converted)
end
-- Same conversion for VGG16, printing the paths involved.
function VGG16()
	print(opts.PATHS.BASE_MODEL_RAW.PROTOTXT)
	print(opts.PATHS.BASE_MODEL_RAW.CAFFEMODEL)
	local model_converted = loadcaffe.load(
		opts.PATHS.BASE_MODEL_RAW.PROTOTXT,
		opts.PATHS.BASE_MODEL_RAW.CAFFEMODEL,
		'cudnn'
	):float()
	print(opts.PATHS.BASE_MODEL_CACHED)
	torch.save(opts.PATHS.BASE_MODEL_CACHED, model_converted)
end
-- Same conversion for AlexNet.
function AlexNet()
	print(opts.PATHS.BASE_MODEL_RAW.PROTOTXT)
	print(opts.PATHS.BASE_MODEL_RAW.CAFFEMODEL)
	local model_converted = loadcaffe.load(
		opts.PATHS.BASE_MODEL_RAW.PROTOTXT,
		opts.PATHS.BASE_MODEL_RAW.CAFFEMODEL,
		'cudnn'
	):float()
	print(opts.PATHS.BASE_MODEL_CACHED)
	torch.save(opts.PATHS.BASE_MODEL_CACHED, model_converted)
end
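-- Note: VGGF, VGG16 and AlexNet are identical apart from logging and all read the
-- same opts.PATHS.BASE_MODEL_RAW entries, so which network actually gets converted
-- is presumably determined by the paths configured in opts.lua.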
-- Convert the GoogleNet-CAM model from hardcoded paths and cache it.
function GoogleNet()
	local model_prototxt_path = 'data/models/GoogleNet_deploy.prototxt'
	local model_caffemodel_path = 'data/models/imagenet_googleletCAM_train_iter_120000.caffemodel'
	local model_converted = loadcaffe.load(model_prototxt_path, model_caffemodel_path, 'cudnn'):float()
	print('GoogleNet model saved in:')
	print(opts.PATHS.BASE_MODEL_CACHED)
	torch.save(opts.PATHS.BASE_MODEL_CACHED, model_converted)
end
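-- VOC(): load the VOC devkit annotations, attach the precomputed box proposals,
-- filter out degenerate boxes, and cache the resulting dataset to disk.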
function VOC()
	local matio = require 'matio' -- only needed by VOC(), for loading the proposal .mat files

	-- Attach the precomputed box proposals from the .mat files to each subset of the dataset.
	local function copy_proposals_in_dataset(trainval_test_mat_paths, voc)
		local subset_paths = {
			{'train', trainval_test_mat_paths.trainval},
			{'val', trainval_test_mat_paths.trainval},
			{'test', trainval_test_mat_paths.test}
		}
		local m = {train = {}, val = {}, test = {}} -- per subset: image file name -> index into the .mat arrays
		local b = {train = nil, val = nil, test = nil} -- per subset: proposal boxes
		local s = {train = nil, val = nil, test = nil} -- per subset: proposal scores (may be absent)
		for _, t in ipairs(subset_paths) do
			local h = matio.load(t[2])
			b[t[1]] = h.boxes
			s[t[1]] = h.boxScores
			for exampleIdx = 1, #b[t[1]] do
				-- image file names are stored as byte tensors; convert to Lua strings for lookup
				m[t[1]][h.images[exampleIdx]:storage():string()] = exampleIdx
			end
		end
		for _, subset in ipairs{'train', 'val', 'test'} do
			voc[subset].rois = {}
			for exampleIdx = 1, voc[subset]:getNumExamples() do
				local ind = m[subset][voc[subset]:getImageFileName(exampleIdx)]
				-- fall back to all-zero scores if the .mat file provides none
				local box_scores = s[subset] and s[subset][ind] or torch.FloatTensor(b[subset][ind]:size(1), 1):zero()
				-- reorder the [y1 x1 y2 x2] columns to [x1 y1 x2 y2], shift to 0-based coordinates, append scores
				voc[subset].rois[exampleIdx] = torch.cat(b[subset][ind]:index(2, torch.LongTensor{2, 1, 4, 3}):float() - 1, box_scores)
				if s[subset] then
					-- keep at most the 2048 highest-scoring proposals, in decreasing score order
					voc[subset].rois[exampleIdx] = voc[subset].rois[exampleIdx]:index(1, ({box_scores:squeeze(2):sort(1, true)})[2]:sub(1, math.min(box_scores:size(1), 2048)))
				end
			end
			voc[subset].getProposals = function(self, exampleIdx)
				return self.rois[exampleIdx]
			end
		end
		-- trainval indexes train first, then continues into val
		voc['trainval'].getProposals = function(self, exampleIdx)
			return exampleIdx <= self.train:getNumExamples() and self.train:getProposals(exampleIdx) or self.val:getProposals(exampleIdx - self.train:getNumExamples())
		end
	end
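	-- The proposal .mat files are assumed to carry three parallel per-image arrays:
	-- h.boxes (Nx4 box matrices), h.boxScores (optional Nx1 score vectors) and
	-- h.images (file names as byte tensors), matching the matio.load usage above.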
	-- Drop proposals narrower or shorter than min_width_height pixels, after
	-- sanity-checking that every box lies within its image bounds.
	local function filter_proposals(voc)
		local min_width_height = 20
		for _, subset in ipairs{'train', 'val', 'test'} do
			for exampleIdx = 1, voc[subset]:getNumExamples() do
				local x1, y1, x2, y2 = unpack(voc[subset].rois[exampleIdx]:split(1, 2))
				local channels, height, width = unpack(image.decompressJPG(voc[subset]:getJpegBytes(exampleIdx)):size():totable())
				assert(x1:ge(0):all() and x1:le(width):all())
				assert(x2:ge(0):all() and x2:le(width):all())
				assert(y1:ge(0):all() and y1:le(height):all())
				assert(y2:ge(0):all() and y2:le(height):all())
				assert(x1:le(x2):all() and y1:le(y2):all())
				voc[subset].rois[exampleIdx] = voc[subset].rois[exampleIdx]:index(1, (x2 - x1):ge(min_width_height):cmul((y2 - y1):ge(min_width_height)):squeeze(2):nonzero():squeeze(2))
			end
		end
	end
	local voc = voc_tools.load(opts.PATHS.VOC_DEVKIT_VOCYEAR)
	copy_proposals_in_dataset(opts.PATHS.PROPOSALS, voc)
	filter_proposals(voc)
	torch.save(opts.PATHS.DATASET_CACHED, voc)
end
print('Preprocessing VOC')
VOC()
print('VOC has been processed')

print('Preprocessing VGGF')
VGGF()
print('VGG model has been processed')

print('Done')
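-- A possible alternative driver (a sketch, not part of the original flow):
-- dispatch on a command-line argument, e.g. `th preprocess.lua GoogleNet`,
-- reusing only the functions defined above. Uncomment to use it instead of
-- the fixed calls above.
-- local steps = {VGGF = VGGF, VGG16 = VGG16, AlexNet = AlexNet, GoogleNet = GoogleNet, VOC = VOC}
-- local step = steps[arg and arg[1]]
-- assert(step, 'usage: th preprocess.lua VGGF|VGG16|AlexNet|GoogleNet|VOC')
-- step()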