TypeDecorator.lua
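-- nn.TypeDecorator wraps another nn.Module and turns :type() into a no-op,
-- so the wrapped module keeps a fixed tensor type even when the surrounding
-- network is cast to another type (e.g. via :cuda() or :float()).
-- Every other Module method is delegated to the wrapped module.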
local TypeDecorator, parent = torch.class('nn.TypeDecorator', 'nn.Module')

function TypeDecorator:__init(module)
   parent.__init(self)
   self.module = module
end
function TypeDecorator:type(type)
   -- it's a fixed type, so do nothing (the reason for this decorator);
   -- return self so call chains like module:type(...):forward(...) still work
   return self
end
-- forward and backward computations simply delegate to the decorated module

function TypeDecorator:updateOutput(input)
   self.output = self.module:updateOutput(input)
   return self.output
end

function TypeDecorator:updateGradInput(input, gradOutput)
   self.gradInput = self.module:updateGradInput(input, gradOutput)
   return self.gradInput
end

function TypeDecorator:accGradParameters(input, gradOutput, scale)
   return self.module:accGradParameters(input, gradOutput, scale)
end

function TypeDecorator:accUpdateGradParameters(input, gradOutput, lr)
   return self.module:accUpdateGradParameters(input, gradOutput, lr)
end

function TypeDecorator:sharedAccUpdateGradParameters(input, gradOutput, lr)
   return self.module:sharedAccUpdateGradParameters(input, gradOutput, lr)
end

-- parameter access and maintenance are likewise forwarded unchanged

function TypeDecorator:parameters()
   return self.module:parameters()
end

function TypeDecorator:zeroGradParameters()
   self.module:zeroGradParameters()
end

function TypeDecorator:updateParameters(learningRate)
   self.module:updateParameters(learningRate)
end

function TypeDecorator:share(mlp, ...)
   self.module:share(mlp, ...)
end

function TypeDecorator:reset()
   self.module:reset()
end

function TypeDecorator:getParameters()
   return self.module:getParameters()
end

function TypeDecorator:__call__(input, gradOutput)
   return self.module:__call__(input, gradOutput)
end