opts.lua
--
-- Copyright (c) 2016, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
-- of patent rights can be found in the PATENTS file in the same directory.
--
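-- Command-line option parsing for the training script. Example invocations
-- (a sketch assuming the standard fb.resnet.torch entry point main.lua; see
-- TRAINING.md, linked below, for full examples):
--   th main.lua -dataset cifar10 -depth 20 -batchSize 128 -nGPU 1
--   th main.lua -dataset imagenet -data /path/to/imagenet -depth 34 -nGPU 2 -nThreads 8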
local M = { }
function M.parse(arg)
   local cmd = torch.CmdLine()
   cmd:text()
   cmd:text('Torch-7 ResNet Training script')
   cmd:text('See https://github.com/facebook/fb.resnet.torch/blob/master/TRAINING.md for examples')
   cmd:text()
   cmd:text('Options:')
   ------------ General options --------------------
   cmd:option('-data', '', 'Path to dataset')
   cmd:option('-dataset', 'imagenet', 'Options: imagenet | cifar10 | cifar100')
   cmd:option('-manualSeed', 0, 'Manually set RNG seed')
   cmd:option('-nGPU', 1, 'Number of GPUs to use by default')
   cmd:option('-backend', 'cudnn', 'Options: cudnn | cunn')
   cmd:option('-cudnn', 'fastest', 'Options: fastest | default | deterministic')
   cmd:option('-gen', 'gen', 'Path to save generated files')
   ------------- Data options ------------------------
   cmd:option('-nThreads', 2, 'number of data loading threads')
   ------------- Training options --------------------
   cmd:option('-nEpochs', 0, 'Number of total epochs to run')
   cmd:option('-epochNumber', 1, 'Manual epoch number (useful on restarts)')
   cmd:option('-batchSize', 32, 'mini-batch size (1 = pure stochastic)')
   cmd:option('-testOnly', 'false', 'Run on validation set only')
   cmd:option('-tenCrop', 'false', 'Ten-crop testing')
   cmd:option('-sequenceOut', 'false', 'Whether to use the sequence loss')
   cmd:option('-coarsefine', 'false', 'Whether to use this criterion')
   cmd:option('-coarsefine2', 'false', 'Whether to use this criterion')
   cmd:option('-coarsefine3', 'false', 'Whether to use this criterion')
   cmd:option('-coarsefine4', 'false', 'Whether to use this criterion')
   cmd:option('-coarsefine5', 'false', 'Whether to use this criterion')
   cmd:option('-coarsefine6', 'false', 'Whether to use this criterion')
   cmd:option('-decayfine', 'false', 'Whether to use this criterion')
   cmd:option('-decayfine2', 'false', 'Whether to use this criterion')
   cmd:option('-decayfine3', 'false', 'Whether to use this criterion')
   ------------- Checkpointing options ---------------
   cmd:option('-save', 'checkpoints', 'Directory in which to save checkpoints')
   cmd:option('-resume', 'none', 'Resume from the latest checkpoint in this directory')
   ---------- Optimization options ----------------------
   cmd:option('-LR', 0.1, 'initial learning rate')
   cmd:option('-momentum', 0.9, 'momentum')
   cmd:option('-weightDecay', 1e-4, 'weight decay')
   ---------- Model options ----------------------------------
   cmd:option('-netType', 'resnet', 'Options: resnet | preresnet')
   cmd:option('-depth', 34, 'ResNet depth: 18 | 34 | 50 | 101 | ...', 'number')
   cmd:option('-shortcutType', '', 'Options: A | B | C')
   cmd:option('-retrain', 'none', 'Path to model to retrain with')
   cmd:option('-optim', 'sgd', 'optimization method')
   cmd:option('-optimState', 'none', 'Path to an optimState to reload from')
   -- Our model configurations -----------------------------
   cmd:option('-seqLength', 4, 'length of the recurrent cells')
   ---------- Model options ----------------------------------
   cmd:option('-shareGradInput', 'false', 'Share gradInput tensors to reduce memory usage')
   cmd:option('-optnet', 'false', 'Use optnet to reduce memory usage')
   cmd:option('-resetClassifier', 'false', 'Reset the fully connected layer for fine-tuning')
   cmd:option('-nClasses', 0, 'Number of classes in the dataset')
   cmd:text()
   local opt = cmd:parse(arg or {})

   -- Boolean flags are declared as strings above; any value other than the
   -- literal string 'false' enables the corresponding option.
   opt.testOnly = opt.testOnly ~= 'false'
   opt.tenCrop = opt.tenCrop ~= 'false'
   opt.shareGradInput = opt.shareGradInput ~= 'false'
   opt.optnet = opt.optnet ~= 'false'
   opt.resetClassifier = opt.resetClassifier ~= 'false'
   opt.sequenceOut = opt.sequenceOut ~= 'false'
   opt.coarsefine = opt.coarsefine ~= 'false'
   opt.coarsefine2 = opt.coarsefine2 ~= 'false'
   opt.coarsefine3 = opt.coarsefine3 ~= 'false'
   opt.coarsefine4 = opt.coarsefine4 ~= 'false'
   opt.coarsefine5 = opt.coarsefine5 ~= 'false'
   opt.coarsefine6 = opt.coarsefine6 ~= 'false'
   opt.decayfine = opt.decayfine ~= 'false'
   opt.decayfine2 = opt.decayfine2 ~= 'false'
   opt.decayfine3 = opt.decayfine3 ~= 'false'
   if not paths.dirp(opt.save) and not paths.mkdir(opt.save) then
      cmd:error('error: unable to create checkpoint directory: ' .. opt.save .. '\n')
   end
   if opt.dataset == 'imagenet' then
      -- Handle the most common case of missing -data flag
      local trainDir = paths.concat(opt.data, 'train')
      if not paths.dirp(opt.data) then
         cmd:error('error: missing ImageNet data directory')
      elseif not paths.dirp(trainDir) then
         cmd:error('error: ImageNet missing `train` directory: ' .. trainDir)
      end
      -- Default shortcutType=B and nEpochs=90
      opt.shortcutType = opt.shortcutType == '' and 'B' or opt.shortcutType
      opt.nEpochs = opt.nEpochs == 0 and 90 or opt.nEpochs
   elseif opt.dataset == 'cifar10' then
      -- Default shortcutType=A and nEpochs=164
      opt.shortcutType = opt.shortcutType == '' and 'A' or opt.shortcutType
      opt.nEpochs = opt.nEpochs == 0 and 164 or opt.nEpochs
   elseif opt.dataset == 'cifar100' then
      -- Default shortcutType=A and nEpochs=164
      opt.shortcutType = opt.shortcutType == '' and 'A' or opt.shortcutType
      opt.nEpochs = opt.nEpochs == 0 and 164 or opt.nEpochs
   else
      cmd:error('unknown dataset: ' .. opt.dataset)
   end
   if opt.resetClassifier then
      if opt.nClasses == 0 then
         cmd:error('-nClasses required when resetClassifier is set')
      end
   end
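   -- -shareGradInput and -optnet both reduce memory by reusing intermediate
   -- buffers, so they are mutually exclusive.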
   if opt.shareGradInput and opt.optnet then
      cmd:error('error: cannot use both -shareGradInput and -optnet')
   end
   print(opt)
   return opt
end
return M
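
-- Usage sketch (an assumption based on the upstream fb.resnet.torch layout,
-- where main.lua consumes this module; adjust names to this project as needed):
--
--   local opts = require 'opts'
--   local opt = opts.parse(arg)
--   torch.manualSeed(opt.manualSeed)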