-
Notifications
You must be signed in to change notification settings - Fork 7
/
Copy path: PerspectiveGridGenerator.lua
100 lines (87 loc) · 3.34 KB
/
PerspectiveGridGenerator.lua
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
-- code adapted from github repo
-- implemented by Yijie Guo (guoyijie@umich.edu) and Xinchen Yan (skywalkeryxc@gmail.com)
-- Generates a fixed base grid of homogeneous 3D points (z, y, x, 1) scaled
-- by per-slice inverse disparity; updateOutput warps it with a per-sample
-- 4x4 transform.
local PGG, parent = torch.class('nn.PerspectiveGridGenerator', 'nn.Module')

-- depth/height/width: grid resolution along each axis (each must be > 1 so
-- the linear-interpolation denominators below are nonzero).
-- focal_length: camera focal length defining the inverse-depth range
-- [1/(f + sqrt(3)), 1/f].
function PGG:__init(depth, height, width, focal_length)
parent.__init(self)
assert(depth > 1)
assert(height > 1)
assert(width > 1)
-- focal_length is used as a divisor below; guard against zero/negative.
assert(focal_length > 0, 'focal_length must be positive')
self.depth = depth
self.height = height
self.width = width
-- Inverse-depth (disparity) bounds; sqrt(3) presumably corresponds to the
-- far corner of the [-1,1]^3 cube -- TODO confirm against the paper/repo.
local dmin = 1/(focal_length + math.sqrt(3))
local dmax = 1/(focal_length)
-- zt = 1, xt, yt in [-1, 1], all divided by the slice disparity so the
-- points lie on a perspective frustum; 4th component is homogeneous 1.
self.baseGrid = torch.Tensor(depth, height, width, 4)
for k=1,self.depth do
-- Disparity interpolated linearly across depth slices (loop-invariant
-- for the inner two loops, so computed once per slice).
local disf = dmin + (k-1)/(self.depth-1) * (dmax-dmin)
local invdisf = 1/disf
for i=1,self.height do
local y = -1 + (i-1)/(self.height-1) * 2
for j=1,self.width do
local x = -1 + (j-1)/(self.width-1) * 2
self.baseGrid[k][i][j][1] = invdisf
self.baseGrid[k][i][j][2] = y * invdisf
self.baseGrid[k][i][j][3] = x * invdisf
self.baseGrid[k][i][j][4] = 1
end
end
end
-- Batch-of-one copy of the base grid; updateOutput re-tiles it whenever the
-- incoming batch size differs.
self.batchGrid = torch.Tensor(1, depth, height, width, 4):copy(self.baseGrid)
end
-- Return a view of tensor t with a singleton batch dimension prepended
-- (e.g. 4x4 -> 1x4x4). No data is copied.
local function addOuterDim(t)
local old = t:size()
local rank = old:size()
local shaped = torch.LongStorage(rank + 1)
shaped[1] = 1
for d = 1, rank do
shaped[d + 1] = old[d]
end
return t:view(shaped)
end
-- Forward pass: applies each sample's 4x4 transform to the precomputed
-- homogeneous perspective grid.
-- _transformMatrix: bx4x4 tensor, or 4x4 for a single sample.
-- Returns self.output sized b x depth x height x width x 4 (the leading
-- batch dimension is dropped when the input was 2D).
function PGG:updateOutput(_transformMatrix)
local transformMatrix
-- Promote a single 4x4 matrix to a batch of one so the bmm path is uniform.
if _transformMatrix:nDimension()==2 then
transformMatrix = addOuterDim(_transformMatrix)
else
transformMatrix = _transformMatrix
end
assert(transformMatrix:nDimension()==3
and transformMatrix:size(2)==4
and transformMatrix:size(3)==4
, 'please input affine transform matrices (bx4x4)')
local batchsize = transformMatrix:size(1)
-- Re-tile the base grid only when the batch size changed since last call.
if self.batchGrid:size(1) ~= batchsize then
self.batchGrid:resize(batchsize, self.depth, self.height, self.width, 4)
for i=1,batchsize do
self.batchGrid:select(1,i):copy(self.baseGrid)
end
end
self.output:resize(batchsize, self.depth, self.height, self.width, 4)
-- Flatten the spatial dims so the warp is one batched matrix multiply:
-- out = grid * M^T (each grid row is a homogeneous point).
local flattenedBatchGrid = self.batchGrid:view(batchsize, self.depth*self.width*self.height, 4)
local flattenedOutput = self.output:view(batchsize, self.depth*self.width*self.height, 4)
torch.bmm(flattenedOutput, flattenedBatchGrid, transformMatrix:transpose(2,3))
-- Drop the synthetic batch dim for single-sample input.
-- NOTE(review): self.output becomes a select() view here; the resize above
-- on a later call relies on torch resize semantics for such views -- confirm.
if _transformMatrix:nDimension()==2 then
self.output = self.output:select(1,1)
end
return self.output
end
-- Backward pass: gradient of the loss w.r.t. the input transform matrices.
-- Since out = grid * M^T (see updateOutput), dL/dM = gradOut^T * grid,
-- computed below as a batched matrix multiply.
-- _transformMatrix: bx4x4 (or 4x4); _gradGrid: gradient w.r.t. the output,
-- b x depth x height x width x 4 (or without the batch dim for 2D input).
function PGG:updateGradInput(_transformMatrix, _gradGrid)
local transformMatrix, gradGrid
-- Mirror updateOutput: promote single-sample input to a batch of one.
if _transformMatrix:nDimension()==2 then
transformMatrix = addOuterDim(_transformMatrix)
gradGrid = addOuterDim(_gradGrid)
else
transformMatrix = _transformMatrix
gradGrid = _gradGrid
end
local batchsize = transformMatrix:size(1)
-- Flatten spatial dims to match the forward pass's bmm shapes.
-- NOTE(review): assumes updateOutput already ran with this batch size, so
-- self.batchGrid is tiled to bx...x4 -- confirm call order is guaranteed.
local flattenedGradGrid = gradGrid:view(batchsize, self.depth*self.width*self.height, 4)
local flattenedBatchGrid = self.batchGrid:view(batchsize, self.depth*self.width*self.height, 4)
self.gradInput:resizeAs(transformMatrix):zero()
-- gradInput = 0 + gradOut^T * grid, per batch (bx4xN times bxNx4 -> bx4x4).
self.gradInput:baddbmm(flattenedGradGrid:transpose(2,3), flattenedBatchGrid) ---????
-- torch.baddbmm doesn't work on cudatensors for some reason
-- Drop the synthetic batch dim for single-sample input.
if _transformMatrix:nDimension()==2 then
self.gradInput = self.gradInput:select(1,1)
end
return self.gradInput
end