-- test.lua — 113 lines (85 loc) · 2.95 KB
-- Jacobian-based gradient tests for nn.DeformableConvolution (Torch7).
require 'nn'
require 'DeformableConvolution'
require 'SlowSpatialConvolution'
-- One TestSuite per gradient check so each can be registered (and, if
-- needed, skipped) independently with the shared torch.Tester below.
local mytest1 = torch.TestSuite()
local mytest2 = torch.TestSuite()
local mytest3 = torch.TestSuite()
local mytest4 = torch.TestSuite()
local mytest5 = torch.TestSuite()
local mytester = torch.Tester()
-- Maximum tolerated difference between analytic and numeric Jacobians.
local precision = 1e-5
-- Shared alias for the numeric-gradient checker used by all suites.
local jac = nn.Jacobian
-- Fixed test geometry: input planes, kernel width/height, input height/width.
-- (Output plane count 'to' is randomized per test in 1..5.)
local from = 3
local ki = 4
local kj = 3
local inj = 8
local ini = 8
-- Checks updateGradInput of nn.DeformableConvolution: compares the module's
-- analytic gradient w.r.t. the input against a numeric Jacobian estimate.
function mytest1.UpdateGradInput()
   local to = math.random(1,5)
   local module = nn.DeformableConvolution(from, to, ki, kj)
   local input = torch.Tensor(from, inj, ini):zero()
   local function test1(module)
      -- Fix: use the file-wide 'jac' alias (nn.Jacobian) like every other
      -- suite in this file, instead of reaching for nn.Jacobian directly.
      local err, diff = jac.testJacobian(module, input)
      if err >= precision then
         --print(diff)
      end
      mytester:assertlt(err, precision, 'error on gradient w.r.t. input ')
   end
   test1(module)
end
-- Checks accGradParameters for the weight tensor of nn.DeformableConvolution
-- against a numeric Jacobian estimate; on failure prints the largest
-- discrepancy over the convolution-weight rows for debugging.
function mytest2.AccGradWeight()
   local nOutput = math.random(1,5)
   local conv = nn.DeformableConvolution(from, nOutput, ki, kj)
   local zeroInput = torch.Tensor(from, inj, ini):zero()
   local function checkWeightGrad(m)
      local err, diff =
         jac.testJacobianParameters(m, zeroInput, m.weight, m.gradWeight)
      if err >= precision then
         --print(diff)
         print(diff[{{1, from*ki*kj*nOutput},{}}]:max())
      end
      mytester:assertlt(err , precision, 'error on gradient w.r.t. weight ')
   end
   checkWeightGrad(conv)
end
-- Checks accGradParameters for the bias tensor of nn.DeformableConvolution
-- against a numeric Jacobian estimate; prints the worst bias-row
-- discrepancy when the tolerance is exceeded.
function mytest3.AccGradBias()
   local nOutput = math.random(1,5)
   local conv = nn.DeformableConvolution(from, nOutput, ki, kj)
   local zeroInput = torch.Tensor(from, inj, ini):zero()
   local function checkBiasGrad(m)
      local err, diff =
         jac.testJacobianParameters(m, zeroInput, m.bias, m.gradBias)
      if err >= precision then
         print(diff[{{1,nOutput},{}}]:max())
      end
      mytester:assertlt(err , precision, 'error on gradient w.r.t. bias ')
   end
   checkBiasGrad(conv)
end
-- Checks the direct-update path (updateParameters without an explicit
-- accGradParameters accumulation) for the weight tensor, via the
-- Jacobian helper's direct-update variant.
function mytest4.AccGradWeightDirect()
   local nOutput = math.random(1,5)
   local conv = nn.DeformableConvolution(from, nOutput, ki, kj)
   local zeroInput = torch.Tensor(from, inj, ini):zero()
   local function checkDirectWeight(m)
      local err = jac.testJacobianUpdateParameters(m, zeroInput, m.weight)
      mytester:assertlt(err , precision, 'error on gradient w.r.t. weight [direct update] ')
   end
   checkDirectWeight(conv)
end
-- Checks the direct-update path for the bias tensor, mirroring
-- AccGradWeightDirect. The trailing 'if' retains the original's
-- (commented-out) debug hook for inspecting the difference matrix.
function mytest5.AccGradBiasDirect()
   local nOutput = math.random(1,5)
   local conv = nn.DeformableConvolution(from, nOutput, ki, kj)
   local zeroInput = torch.Tensor(from, inj, ini):zero()
   local function checkDirectBias(m)
      local err, diff = jac.testJacobianUpdateParameters(m, zeroInput, m.bias)
      mytester:assertlt(err , precision, 'error on gradient w.r.t. bias [direct update] ')
      if err >= precision then
         --print(diff)
      end
   end
   checkDirectBias(conv)
end
-- Register all five suites (in their original order) and run everything.
for _, suite in ipairs({mytest1, mytest2, mytest3, mytest4, mytest5}) do
   mytester:add(suite)
end
mytester:run()