fcnInitializeResNetwork16s_3.m
function net = fcnInitializeResNetwork16s_3(net, varargin)
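%FCNINITIALIZERESNETWORK16S_3 Extend a ResNet FCN-32s model to an FCN-16s one.
%   Replaces the final 32x upsampling with a 2x upsampling, fuses the result
%   with a confidence-weighted skip network built on the res4 block, and
%   upsamples 8x back to input resolution.
%
%   A minimal usage sketch (option names are the defaults below; that NET
%   holds a pretrained 32s DagNN is an assumption):
%     net = fcnInitializeResNetwork16s_3(net, 'resLayer', 101, ...
%                                        'nClass', 150, 'newLr', 1) ;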
opts.rnn = false;
opts.nh = 512;
opts.nClass = 150;
opts.resLayer = 50;
opts.newLr = 1;
opts = vl_argparse(opts, varargin) ;
nh = opts.nh;
nClass = opts.nClass;
%% Replace the 32x upsampling layer with a 2x upsampling
net.removeLayer('deconv32') ;
filters = single(bilinear_u(4, nClass, nClass)) ;
net.addLayer('deconv32', ...
  dagnn.ConvTranspose(...
    'size', size(filters), ...
    'upsample', 2, ...
    'crop', 1, ...
    'numGroups', nClass, ...
    'hasBias', false), ...
  'sum_1_out', 'x3', 'deconvf_1') ;
f = net.getParamIndex('deconvf_1') ;
net.params(f).value = filters ;
net.params(f).learningRate = 1 ;
net.params(f).weightDecay = 1 ;
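% Size check (hedged, MatConvNet ConvTranspose convention): a 4-tap
% bilinear kernel with 'upsample' 2 and 'crop' 1 maps H x W to 2H x 2W,
% since out = 2*(H-1) + 4 - 2*1 = 2*H.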
% Softmax over the 2x-upsampled scores, consumed by the confidence layer below
net.addLayer('deconv32_softmax', dagnn.SoftMax(), 'x3', 'x3_softmax');
%% Pick the skip connection source from the last res4 unit
switch opts.resLayer
  case 50
    skip4_input = 'res4fx' ;
  case 101
    skip4_input = 'res4b22x' ;
  case 152
    skip4_input = 'res4b35x' ;
end
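% Naming note (assumption, following the MatConvNet ResNet import
% convention): res4fx / res4b22x / res4b35x are the outputs of the last
% res4 unit in ResNet-50 / -101 / -152 respectively.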
%% Build the skip network over intermediate res4 outputs
skip_inputs = {};
switch opts.resLayer
  case 50
    % 50-layer: intermediate res4 units b-e
    skip_inputs = {'res4ex', 'res4dx', 'res4cx', 'res4bx'};
  case 101
    % 101-layer: every second res4b unit
    for ll = 1 : 21
      if mod(ll,2) == 0
        skip_inputs{end+1} = sprintf('res4b%dx', ll);
      end
    end
  case 152
    % 152-layer: every third res4b unit
    for ll = 1 : 34
      if mod(ll,3) == 0
        skip_inputs{end+1} = sprintf('res4b%dx', ll);
      end
    end
end
skip_inputs = ['res4ax', skip_inputs, skip4_input];
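% Example: with opts.resLayer == 50 this evaluates to
%   {'res4ax', 'res4ex', 'res4dx', 'res4cx', 'res4bx', 'res4fx'}
% (the char arrays are promoted to cells when concatenated with a cell).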
[net, softmax_outs, classifier_out] = Softmax_skipNetwork(net, skip_inputs, ...
  1024, 512, nClass, opts.newLr, 'skip4');
net.addLayer('confidence16', Confidence(), ['x3_softmax', softmax_outs], 'confidence16');
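% DagGatedsum2 and rnn_initialize below are project-local components
% (their internals are assumptions here): the layer computes a gated
% weighted sum of the score streams, and its parameter is sized with one
% weight per stream per class. numel(['x3', classifier_out]) counts the
% streams because the char 'x3' is promoted into the cell array.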
% Add summation layer
if numel(skip_inputs) > 0
  % Unweighted alternative:
  % net.addLayer('sum2', dagnn.Sum(), ['x3', classifier_out], 'x6');
  net.addLayer('Weighted_sum_2', ...
    DagGatedsum2('method', 'sum'), ...
    ['x3', classifier_out, 'confidence16'], 'x6', 'WeightSum_param2');
  f = net.getParamIndex('WeightSum_param2') ;
  net.params(f).value = rnn_initialize(nClass, numel(['x3', classifier_out]), 1) ;
  net.params(f).learningRate = 0 ;
  net.params(f).weightDecay = 0.001 ;
else
  % Fallback; unreachable here, since skip_inputs always contains at
  % least 'res4ax' and skip4_input
  net.addLayer('sum2', dagnn.Sum(), {'x3', 'x5'}, 'x6') ;
end
%% Add deconvolution layers
filters = single(bilinear_u(16, nClass, nClass)) ;
net.addLayer('deconv16', ...
  dagnn.ConvTranspose(...
    'size', size(filters), ...
    'upsample', 8, ...
    'crop', 4, ...
    'numGroups', nClass, ...
    'hasBias', false), ...
  'x6', 'prediction', 'deconvf') ;
f = net.getParamIndex('deconvf') ;
net.params(f).value = filters ;
net.params(f).learningRate = 1 ;
net.params(f).weightDecay = 1 ;
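% Size check (hedged, same convention as above): a 16-tap bilinear kernel
% with 'upsample' 8 and 'crop' 4 gives out = 8*(H-1) + 16 - 2*4 = 8*H,
% restoring the 16s score map to input resolution.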
% Make sure the output of the bilinear interpolator is not discarded,
% for visualization purposes
net.vars(net.getVarIndex('prediction')).precious = 1 ;