Commit c4a5a19

Author: liuyu (committed)
fix bug in mask layer
1 parent f1b8b04 commit c4a5a19

File tree: 8 files changed (+146, -12 lines)

matlab/UNKNOWN.INFO (+1)

@@ -0,0 +1 @@
+log20160920-182811.14030

matlab/debug.m (+5, -4)

@@ -1,4 +1,5 @@
-caffe.init_log('log/log')
-t = caffe.get_solver('solver_80k110k_lr1_3.prototxt', 0:3);
-t.step(1);
-fprintf('done\n');
+caffe.reset_all;
+s = caffe.get_solver('test_model/solver.prototxt', 0);
+input{1}{1} = rand(4,4,2,1,'single');
+input{1}{2} = [0 0 0 1; 0 1 0 0; 0 0 0 1; 1 0 0 0];
+

matlab/log20160920-182811.14030 (+94)

@@ -0,0 +1,94 @@
+Log file created at: 2016/09/20 18:28:11
+Running on machine: BJ-IDC1-10-10-10-126
+Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
+I0920 18:28:11.954001 14106 solver.cpp:48] Initializing solver from parameters:
+base_lr: 0.001
+display: 20
+max_iter: 110000
+lr_policy: "step"
+gamma: 0.1
+momentum: 0.9
+weight_decay: 0.0005
+stepsize: 80000
+snapshot: 0
+device_id: 0
+net: "test_model/train_val.prototxt"
+I0920 18:28:11.954223 14106 solver.cpp:91] Creating training net from net file: test_model/train_val.prototxt
+I0920 18:28:11.954322 14106 upgrade_proto.cpp:66] Attempting to upgrade input file specified using deprecated input fields: test_model/train_val.prototxt
+I0920 18:28:11.954341 14106 upgrade_proto.cpp:69] Successfully upgraded file specified using deprecated input fields.
+W0920 18:28:11.954345 14106 upgrade_proto.cpp:71] Note that future Caffe releases will only support input layers and not input fields.
+I0920 18:28:11.954548 14106 net.cpp:49] Initializing net from parameters:
+name: "test-mask-layer"
+state {
+  phase: TRAIN
+}
+layer {
+  name: "input"
+  type: "Input"
+  top: "data"
+  top: "mask"
+  input_param {
+    shape {
+      dim: 1
+      dim: 3
+      dim: 4
+      dim: 4
+    }
+    shape {
+      dim: 1
+      dim: 1
+      dim: 4
+      dim: 4
+    }
+  }
+}
+layer {
+  type: "Mask"
+  bottom: "data"
+  bottom: "mask"
+  top: "masked"
+}
+I0920 18:28:11.954583 14106 layer_factory.hpp:77] Creating layer input
+I0920 18:28:11.954598 14106 net.cpp:91] Creating Layer input
+I0920 18:28:11.954607 14106 net.cpp:409] input -> data
+I0920 18:28:11.954627 14106 net.cpp:409] input -> mask
+I0920 18:28:12.835309 14106 net.cpp:141] Setting up input
+I0920 18:28:12.835350 14106 net.cpp:148] Top shape: 1 3 4 4 (48)
+I0920 18:28:12.835353 14106 net.cpp:148] Top shape: 1 1 4 4 (16)
+I0920 18:28:12.835356 14106 net.cpp:156] Memory required for data: 256
+I0920 18:28:12.835362 14106 layer_factory.hpp:77] Creating layer
+I0920 18:28:12.835376 14106 net.cpp:91] Creating Layer
+I0920 18:28:12.835378 14106 net.cpp:435] <- data
+I0920 18:28:12.835383 14106 net.cpp:435] <- mask
+I0920 18:28:12.835386 14106 net.cpp:409] -> masked
+I0920 18:28:12.842926 14106 net.cpp:141] Setting up
+I0920 18:28:12.842947 14106 net.cpp:148] Top shape: 1 3 0 1 (0)
+I0920 18:28:12.842953 14106 net.cpp:156] Memory required for data: 256
+I0920 18:28:12.842960 14106 net.cpp:219] does not need backward computation.
+I0920 18:28:12.842967 14106 net.cpp:219] input does not need backward computation.
+I0920 18:28:12.842973 14106 net.cpp:261] This network produces output masked
+I0920 18:28:12.842983 14106 net.cpp:274] Network initialization done.
+I0920 18:28:12.843050 14106 solver.cpp:60] Solver scaffolding done.
+I0920 18:28:12.858101 14106 parallel.cpp:405] GPUs pairs 0:1, 2:3, 0:2
+I0920 18:28:13.429554 14106 upgrade_proto.cpp:66] Attempting to upgrade input file specified using deprecated input fields: test_model/train_val.prototxt
+I0920 18:28:13.429597 14106 upgrade_proto.cpp:69] Successfully upgraded file specified using deprecated input fields.
+W0920 18:28:13.429601 14106 upgrade_proto.cpp:71] Note that future Caffe releases will only support input layers and not input fields.
+I0920 18:28:13.429656 14106 net.cpp:84] Sharing layer input from root net
+I0920 18:28:13.430806 14106 net.cpp:134] Created top blob 0 (shape: 1 3 4 4 (48)) for shared layer input
+I0920 18:28:13.430853 14106 net.cpp:134] Created top blob 1 (shape: 1 1 4 4 (16)) for shared layer input
+I0920 18:28:14.011651 14106 upgrade_proto.cpp:66] Attempting to upgrade input file specified using deprecated input fields: test_model/train_val.prototxt
+I0920 18:28:14.011693 14106 upgrade_proto.cpp:69] Successfully upgraded file specified using deprecated input fields.
+W0920 18:28:14.011698 14106 upgrade_proto.cpp:71] Note that future Caffe releases will only support input layers and not input fields.
+I0920 18:28:14.011746 14106 net.cpp:84] Sharing layer input from root net
+I0920 18:28:14.012816 14106 net.cpp:134] Created top blob 0 (shape: 1 3 4 4 (48)) for shared layer input
+I0920 18:28:14.012874 14106 net.cpp:134] Created top blob 1 (shape: 1 1 4 4 (16)) for shared layer input
+I0920 18:28:14.013025 14106 parallel.cpp:234] GPU 2 does not have p2p access to GPU 0
+I0920 18:28:14.592844 14106 upgrade_proto.cpp:66] Attempting to upgrade input file specified using deprecated input fields: test_model/train_val.prototxt
+I0920 18:28:14.592874 14106 upgrade_proto.cpp:69] Successfully upgraded file specified using deprecated input fields.
+W0920 18:28:14.592877 14106 upgrade_proto.cpp:71] Note that future Caffe releases will only support input layers and not input fields.
+I0920 18:28:14.592959 14106 net.cpp:84] Sharing layer input from root net
+I0920 18:28:14.594660 14106 net.cpp:134] Created top blob 0 (shape: 1 3 4 4 (48)) for shared layer input
+I0920 18:28:14.594740 14106 net.cpp:134] Created top blob 1 (shape: 1 1 4 4 (16)) for shared layer input
+I0920 18:28:15.121878 14106 caffe_.cpp:330] Starting Optimization
+W0920 18:30:46.255520 14106 net.hpp:45] DEPRECATED: ForwardPrefilled() will be removed in a future version. Use Forward().
+I0920 20:35:43.931830 14106 caffe_.cpp:857] Protobuf : File already exists in database: caffe.proto . at google/protobuf/descriptor_database.cc Line 57

matlab/test.m (+4)

@@ -0,0 +1,4 @@
+caffe.init_log('log/log')
+t = caffe.get_solver('solver_80k110k_lr1_3.prototxt', 0:3);
+t.step(1);
+fprintf('done\n');

matlab/test_model/solver.prototxt (+13)

@@ -0,0 +1,13 @@
+net: "test_model/train_val.prototxt"
+base_lr: 0.001
+lr_policy: "step"
+gamma: 0.1
+stepsize: 80000
+display: 20
+max_iter: 110000
+momentum: 0.9
+weight_decay: 0.0005
+# We disable standard caffe solver snapshotting and implement our own snapshot
+snapshot: 0
+#debug_info: true
+

matlab/test_model/train_val.prototxt (+20)

@@ -0,0 +1,20 @@
+name: "test-mask-layer"
+
+input: "data"
+input_dim: 1
+input_dim: 3
+input_dim: 4
+input_dim: 4
+
+input : "mask"
+input_dim: 1
+input_dim: 1
+input_dim: 4
+input_dim: 4
+
+layer {
+  bottom: "data"
+  bottom: "mask"
+  top: "masked"
+  type: "Mask"
+}

src/caffe/layers/mask_layer.cpp (+9, -7)

@@ -24,7 +24,7 @@ namespace caffe {
   w_ = bottom[ 0 ]->width();
   h_ = bottom[ 0 ]->height();
   vector<int> top_shape;
-  Dtype* mask = bottom[ 1 ]->cpu_data();
+  const Dtype* mask = bottom[ 1 ]->cpu_data();
   validnum_ = caffe_cpu_asum(bottom[ 1 ]->count(), mask);
   top[ 0 ]->Reshape(n_, c_, validnum_, 1);
   pass_idx_.Reshape(1, 1, validnum_, 1);
@@ -33,8 +33,8 @@ namespace caffe {
 template <typename Dtype>
 void MaskLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
     const vector<Blob<Dtype>*>& top) {
-  Dtype* mask = bottom[ 1 ]->cpu_data();
-  Dtype* bottom_data = bottom[ 0 ]->cpu_data();
+  const Dtype* mask = bottom[ 1 ]->cpu_data();
+  const Dtype* bottom_data = bottom[ 0 ]->cpu_data();
   Dtype* top_data = top[ 0 ]->mutable_cpu_data();
   int idx = 0;
   for ( int y = 0; y < h_; y++ ){
@@ -45,6 +45,7 @@ namespace caffe {
        {
          top_data[ c*validnum_ + idx ] = bottom_data[ c*w_*h_ + y*w_ + x ];
        }
+       idx++;
      }
      else{

@@ -55,10 +56,10 @@ namespace caffe {

 template <typename Dtype>
 void MaskLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& bottom,
-    const vector<Blob<Dtype>*>& top) {
-  Dtype* bottom_diff = bottom[ 0 ]->cpu_diff();
-  Dtype* mask = bottom[ 1 ]->cpu_data();
-  Dtype* top_diff = top[ 0 ]->cpu_diff();
+    const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& top) {
+  Dtype* bottom_diff = bottom[ 0 ]->mutable_cpu_diff();
+  const Dtype* mask = bottom[ 1 ]->cpu_data();
+  const Dtype* top_diff = top[ 0 ]->cpu_diff();
   int idx = 0;
   for ( int y = 0; y < h_; y++ ){
     for ( int x = 0; x < w_; x++ ){
@@ -68,6 +69,7 @@ namespace caffe {
        {
          bottom_diff[ c*w_*h_ + y*w_ + x ] = top_diff[ c*validnum_ + idx ];
        }
+       idx++;
      }
      else{
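
A note on the fix: the two added idx++ statements advance the compacted index once per valid mask position (after the per-channel copy), and Backward_cpu now takes the standard propagate_down argument and writes through mutable_cpu_diff(). As a hedged illustration only (not the layer's actual source), the standalone C++ sketch below reproduces the forward "gather" logic this hunk changes, using the shapes from the test net above (data 1x3x4x4, mask 1x1x4x4); names such as validnum and the single-sample, CPU-only setup are assumptions made for the example.

// Standalone sketch of the Mask layer's forward gather after this commit.
// Assumes one sample, CPU only; validnum and the data values are illustrative.
#include <cstdio>
#include <vector>

int main() {
  const int C = 3, H = 4, W = 4;
  std::vector<float> data(C * H * W);
  for (int i = 0; i < C * H * W; ++i) data[i] = static_cast<float>(i);

  // Same mask pattern as debug.m sets up: one valid position per row.
  const float mask[H * W] = {0, 0, 0, 1,
                             0, 1, 0, 0,
                             0, 0, 0, 1,
                             1, 0, 0, 0};

  // validnum = number of nonzero mask entries; the top blob is C x validnum x 1.
  int validnum = 0;
  for (int i = 0; i < H * W; ++i) validnum += (mask[i] != 0);
  std::vector<float> top(C * validnum, 0.f);

  // Forward gather: copy each masked pixel into the next compacted column.
  int idx = 0;
  for (int y = 0; y < H; ++y) {
    for (int x = 0; x < W; ++x) {
      if (mask[y * W + x] != 0) {
        for (int c = 0; c < C; ++c)
          top[c * validnum + idx] = data[c * W * H + y * W + x];
        ++idx;  // the fix: advance once per valid position, after the channel loop
      }
    }
  }

  // The backward pass would scatter top_diff back with the same indexing.
  for (int c = 0; c < C; ++c) {
    for (int i = 0; i < validnum; ++i) printf("%6.1f ", top[c * validnum + i]);
    printf("\n");
  }
  return 0;
}

With this mask, validnum is 4 and each channel's four valid pixels land in scan order in the compacted output.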

src/caffe/proto/caffe.proto (-1)

@@ -386,7 +386,6 @@ message LayerParameter {
   optional InputParameter input_param = 143;
   optional LogParameter log_param = 134;
   optional LRNParameter lrn_param = 118;
-  optional MaskParameter mask_param = 190;
   optional MemoryDataParameter memory_data_param = 119;
   optional MVNParameter mvn_param = 120;
   optional NormalizeParameter normalize_param = 147;
