main.lua
require 'torch'
require 'math'
require 'gnuplot' -- used for the plotting calls below

local loader = require 'iris_loader'
local train = require 'train'

-- fix the random seed so runs are reproducible
torch.manualSeed(1)
local data = loader.load_data()
local opt = {
  nonlinearity_type = 'sigmoid',
  training_iterations = 150, -- note: the code now uses full *batches*, not *minibatches*
  print_every = 25,          -- how many iterations to skip between printing the loss
}

-- train sigmoid and requ versions
model_sigmoid, losses_sigmoid = train(opt, data)
-- TODO: uncomment once you implement requ
--opt.nonlinearity_type = 'requ'
--model_requ, losses_requ = train(opt, data)

--------------------------------------------------------
-- EVALUATION STUFF: YOU CAN IGNORE ALL THIS CODE
-- NOTE: though we don't have a test set, we'll plot the two training loss curves.
-- We won't know if we overfit, but we can see how flexible our model is.

-- plot
gnuplot.figure()
gnuplot.plot({'sigmoid',
    torch.range(1, #losses_sigmoid), -- x-coordinates
    torch.Tensor(losses_sigmoid),    -- y-coordinates
    '-'}
  -- TODO: uncomment when you implement requ
  -- , {'requ',
  --     torch.range(1, #losses_requ), -- x-coordinates
  --     torch.Tensor(losses_requ),    -- y-coordinates
  --     '-'}
)

models = {
  -- requ = model_requ, -- TODO: uncomment once you implement requ
  sigmoid = model_sigmoid,
}
for model_name, model in pairs(models) do
  -- classification error on train set
  local log_probs = model:forward(data.inputs)
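  -- the model returns per-class log-probabilities; the prediction is the argmax
  -- over the second (class) dimension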
  local _, predictions = torch.max(log_probs, 2)
  print(string.format('fraction correct for %s:', model_name))
  print(torch.mean(torch.eq(predictions:long(), data.targets:long()):double()))

  -- classification region in one slice (cf. Figure 1 scatterplots in writeup)
  -- not pretty, but the best we can do without hacking away at gnuplot or using another library
  local f1 = 4    -- feature on first axis
  local f2 = 3    -- feature on second axis
  local size = 60 -- resolution
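  -- sweep f1 and f2 over a grid; all other features stay fixed at the values
  -- of the first training example (cloned below)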
  local f1grid = torch.linspace(data.inputs[{{},f1}]:min(), data.inputs[{{},f1}]:max(), size)
  local f2grid = torch.linspace(data.inputs[{{},f2}]:min(), data.inputs[{{},f2}]:max(), size)
  local result = torch.Tensor(size, size)
  local input = data.inputs[1]:clone()
  for i = 1, size do
    input[f1] = f1grid[i]
    for j = 1, size do
      input[f2] = f2grid[j]
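      -- forward returns log-probabilities; exp() of the first entry is the
      -- probability the model assigns to the first class at this grid point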
      result[{i,j}] = math.exp(model:forward(input)[1])
    end
  end
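  -- pinning one cell to 0 and another to 1 forces the image's color scale to
  -- cover the full [0, 1] probability range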
  result[1][1] = 0 -- ugly hack to get the right scale
  result[1][2] = 1 -- ugly hack to get the right scale

  gnuplot.figure()
  gnuplot.imagesc(result, model_name)
end