forked from torch/nn
-
Notifications
You must be signed in to change notification settings - Fork 0
/
SelectTable.lua
58 lines (50 loc) · 1.36 KB
/
SelectTable.lua
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
-- nn.SelectTable: a Module that forwards a single entry of an input table
-- as its output. On the backward pass the incoming gradient is routed to
-- the selected entry and every other entry receives a zero tensor.
local SelectTable, parent = torch.class('nn.SelectTable', 'nn.Module')

--- Construct a SelectTable.
-- @param index position of the entry to select; negative values count
--   from the end of the table (-1 selects the last entry)
function SelectTable:__init(index)
   parent.__init(self)
   self.index = index
   -- gradInput mirrors the structure of the input table, not a tensor
   self.gradInput = {}
end
--- Forward pass: return the `index`-th entry of the input table.
-- Negative indices count from the end (-1 selects the last entry).
-- @param input a Lua table of values (typically tensors)
-- @return the selected entry, also stored in self.output
function SelectTable:updateOutput(input)
   -- index 0 is invalid but math.abs(0) <= #input would pass the range
   -- check and input[0] would silently yield nil; reject it explicitly.
   assert(self.index ~= 0 and math.abs(self.index) <= #input,
      "arg 1 table idx out of range")
   if self.index < 0 then
      -- e.g. index == -1 selects input[#input]
      self.output = input[#input + self.index + 1]
   else
      self.output = input[self.index]
   end
   return self.output
end
--- Make `dst` structurally mirror `src`, recursing into nested tables.
-- Missing entries are filled with zeroed clones of the corresponding
-- src tensors; existing entries of the wrong size are resized and
-- zeroed; entries already present with a matching size are left
-- untouched (this is what preserves a previously-assigned gradient).
local function zeroTableCopy(dst, src)
   for key, value in pairs(src) do
      if torch.type(value) == "table" then
         -- recurse, creating the sub-table on first visit
         dst[key] = zeroTableCopy(dst[key] or {}, value)
      elseif dst[key] == nil then
         dst[key] = value:clone():zero()
      elseif not dst[key]:isSameSizeAs(value) then
         local stale = dst[key]
         stale:resizeAs(value)
         stale:zero()
      end
   end
   return dst
end
--- Backward pass: route gradOutput to the selected entry of gradInput.
-- Every other entry is filled with a zero tensor shaped like the
-- corresponding input entry (recursively, for nested tables).
function SelectTable:updateGradInput(input, gradOutput)
   -- Assign gradOutput BEFORE calling zeroTableCopy: zeroTableCopy only
   -- overwrites entries that are missing or size-mismatched, so the
   -- gradient placed here (same size as its input entry) survives.
   if self.index < 0 then
      self.gradInput[#input + self.index + 1] = gradOutput
   else
      self.gradInput[self.index] = gradOutput
   end
   zeroTableCopy(self.gradInput, input)
   -- Drop stale trailing entries left over from a previous, larger input.
   for i=#input+1, #self.gradInput do
      self.gradInput[i] = nil
   end
   return self.gradInput
end
--- Convert the module's state to another tensor type.
-- output and gradInput hold references to external tensors (the input
-- entry / gradOutput), so they are reset to empty tables instead of
-- being converted in place; parent.type handles the rest.
function SelectTable:type(type, tensorCache)
   self.output = {}
   self.gradInput = {}
   return parent.type(self, type, tensorCache)
end