Я добавляю новый слой в Caffe, и появляется ошибка, связанная с «layer_param_»

Я новичок в Caffe и хочу протестировать деконволюцию. Я добавляю два новых слоя (слой unpooling и слой BN).
Во время теста произошла ошибка, и это информация об ошибке:

F0715 16:53:43.956820  5838 unpooling_layer.cpp:29] Check failed: !unpool_param.has_kernel_size() != !(unpool_param.has_kernel_h() && unpool_param.has_kernel_w()) Filter size is kernel_size OR kernel_h and kernel_w; not both
*** Check failure stack trace: ***
Aborted (core dumped)

Вот полный вывод теста:

I0715 16:53:43.953850  5838 upgrade_proto.cpp:53] Attempting to upgrade input file specified using deprecated V1LayerParameter: deconv.prototxt
I0715 16:53:43.954031  5838 upgrade_proto.cpp:61] Successfully upgraded file specified using deprecated V1LayerParameter
I0715 16:53:43.954062  5838 upgrade_proto.cpp:67] Attempting to upgrade input file specified using deprecated input fields: deconv.prototxt
I0715 16:53:43.954092  5838 upgrade_proto.cpp:70] Successfully upgraded file specified using deprecated input fields.
W0715 16:53:43.954098  5838 upgrade_proto.cpp:72] Note that future Caffe releases will only support input layers and not input fields.
I0715 16:53:43.954301  5838 net.cpp:51] Initializing net from parameters:
name: "Deconv_test"state {
phase: TEST
level: 0
}
layer {
name: "input"type: "Input"top: "data"input_param {
shape {
dim: 1
dim: 3
dim: 224
dim: 224
}
}
}
layer {
name: "conv1_1"type: "Convolution"bottom: "data"top: "conv1_1"param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 64
pad: 1
kernel_size: 3
}
}
layer {
name: "bn1_1"type: "BN"bottom: "conv1_1"top: "conv1_1"}
layer {
name: "relu1_1"type: "ReLU"bottom: "conv1_1"top: "conv1_1"}
layer {
name: "conv1_2"type: "Convolution"bottom: "conv1_1"top: "conv1_2"param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 64
pad: 1
kernel_size: 3
}
}
layer {
name: "bn1_2"type: "BN"bottom: "conv1_2"top: "conv1_2"}
layer {
name: "relu1_2"type: "ReLU"bottom: "conv1_2"top: "conv1_2"}
layer {
name: "pool1"type: "Pooling"bottom: "conv1_2"top: "pool1"top: "pool1_mask"pooling_param {
pool: MAX
kernel_size: 2
stride: 2
}
}
layer {
name: "unpool1"type: "Unpooling"bottom: "pool1"bottom: "pool1_mask"top: "unpool1"}
layer {
name: "deconv1_1"type: "Deconvolution"bottom: "unpool1"top: "deconv1_1"param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 64
pad: 1
kernel_size: 3
weight_filler {
type: "gaussian"std: 0.01
}
bias_filler {
type: "constant"value: 0
}
}
}
layer {
name: "debn1_1"type: "BN"bottom: "deconv1_1"top: "deconv1_1"}
layer {
name: "derelu1_1"type: "ReLU"bottom: "deconv1_1"top: "deconv1_1"}
layer {
name: "deconv1_2"type: "Deconvolution"bottom: "deconv1_1"top: "deconv1_2"param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 64
pad: 1
kernel_size: 3
weight_filler {
type: "gaussian"std: 0.01
}
bias_filler {
type: "constant"value: 0
}
}
}
layer {
name: "debn1_2"type: "BN"bottom: "deconv1_2"top: "deconv1_2"}
layer {
name: "derelu1_2"type: "ReLU"bottom: "deconv1_2"top: "deconv1_2"}
layer {
name: "seg-score-voc"type: "Convolution"bottom: "deconv1_2"top: "seg-score"param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 21
kernel_size: 1
weight_filler {
type: "gaussian"std: 0.01
}
bias_filler {
type: "constant"value: 0
}
}
}
I0715 16:53:43.954690  5838 layer_factory.hpp:77] Creating layer input
I0715 16:53:43.954740  5838 net.cpp:84] Creating Layer input
I0715 16:53:43.954752  5838 net.cpp:380] input -> data
I0715 16:53:43.954807  5838 net.cpp:122] Setting up input
I0715 16:53:43.954823  5838 net.cpp:129] Top shape: 1 3 224 224 (150528)
I0715 16:53:43.954828  5838 net.cpp:137] Memory required for data: 602112
I0715 16:53:43.954839  5838 layer_factory.hpp:77] Creating layer conv1_1
I0715 16:53:43.954865  5838 net.cpp:84] Creating Layer conv1_1
I0715 16:53:43.954876  5838 net.cpp:406] conv1_1 <- data
I0715 16:53:43.954898  5838 net.cpp:380] conv1_1 -> conv1_1
I0715 16:53:43.955159  5838 net.cpp:122] Setting up conv1_1
I0715 16:53:43.955174  5838 net.cpp:129] Top shape: 1 64 224 224 (3211264)
I0715 16:53:43.955179  5838 net.cpp:137] Memory required for data: 13447168
I0715 16:53:43.955215  5838 layer_factory.hpp:77] Creating layer bn1_1
I0715 16:53:43.955237  5838 net.cpp:84] Creating Layer bn1_1
I0715 16:53:43.955246  5838 net.cpp:406] bn1_1 <- conv1_1
I0715 16:53:43.955265  5838 net.cpp:367] bn1_1 -> conv1_1 (in-place)
I0715 16:53:43.955569  5838 net.cpp:122] Setting up bn1_1
I0715 16:53:43.955579  5838 net.cpp:129] Top shape: 1 64 224 224 (3211264)
I0715 16:53:43.955585  5838 net.cpp:137] Memory required for data: 26292224
I0715 16:53:43.955611  5838 layer_factory.hpp:77] Creating layer relu1_1
I0715 16:53:43.955628  5838 net.cpp:84] Creating Layer relu1_1
I0715 16:53:43.955649  5838 net.cpp:406] relu1_1 <- conv1_1
I0715 16:53:43.955665  5838 net.cpp:367] relu1_1 -> conv1_1 (in-place)
I0715 16:53:43.955680  5838 net.cpp:122] Setting up relu1_1
I0715 16:53:43.955688  5838 net.cpp:129] Top shape: 1 64 224 224 (3211264)
I0715 16:53:43.955693  5838 net.cpp:137] Memory required for data: 39137280
I0715 16:53:43.955699  5838 layer_factory.hpp:77] Creating layer conv1_2
I0715 16:53:43.955723  5838 net.cpp:84] Creating Layer conv1_2
I0715 16:53:43.955730  5838 net.cpp:406] conv1_2 <- conv1_1
I0715 16:53:43.955749  5838 net.cpp:380] conv1_2 -> conv1_2
I0715 16:53:43.956133  5838 net.cpp:122] Setting up conv1_2
I0715 16:53:43.956148  5838 net.cpp:129] Top shape: 1 64 224 224 (3211264)
I0715 16:53:43.956153  5838 net.cpp:137] Memory required for data: 51982336
I0715 16:53:43.956182  5838 layer_factory.hpp:77] Creating layer bn1_2
I0715 16:53:43.956198  5838 net.cpp:84] Creating Layer bn1_2
I0715 16:53:43.956207  5838 net.cpp:406] bn1_2 <- conv1_2
I0715 16:53:43.956223  5838 net.cpp:367] bn1_2 -> conv1_2 (in-place)
I0715 16:53:43.956513  5838 net.cpp:122] Setting up bn1_2
I0715 16:53:43.956524  5838 net.cpp:129] Top shape: 1 64 224 224 (3211264)
I0715 16:53:43.956528  5838 net.cpp:137] Memory required for data: 64827392
I0715 16:53:43.956544  5838 layer_factory.hpp:77] Creating layer relu1_2
I0715 16:53:43.956558  5838 net.cpp:84] Creating Layer relu1_2
I0715 16:53:43.956567  5838 net.cpp:406] relu1_2 <- conv1_2
I0715 16:53:43.956583  5838 net.cpp:367] relu1_2 -> conv1_2 (in-place)
I0715 16:53:43.956598  5838 net.cpp:122] Setting up relu1_2
I0715 16:53:43.956604  5838 net.cpp:129] Top shape: 1 64 224 224 (3211264)
I0715 16:53:43.956609  5838 net.cpp:137] Memory required for data: 77672448
I0715 16:53:43.956615  5838 layer_factory.hpp:77] Creating layer pool1
I0715 16:53:43.956630  5838 net.cpp:84] Creating Layer pool1
I0715 16:53:43.956637  5838 net.cpp:406] pool1 <- conv1_2
I0715 16:53:43.956655  5838 net.cpp:380] pool1 -> pool1
I0715 16:53:43.956674  5838 net.cpp:380] pool1 -> pool1_mask
I0715 16:53:43.956704  5838 net.cpp:122] Setting up pool1
I0715 16:53:43.956715  5838 net.cpp:129] Top shape: 1 64 112 112 (802816)
I0715 16:53:43.956723  5838 net.cpp:129] Top shape: 1 64 112 112 (802816)
I0715 16:53:43.956727  5838 net.cpp:137] Memory required for data: 84094976
I0715 16:53:43.956734  5838 layer_factory.hpp:77] Creating layer unpool1
I0715 16:53:43.956753  5838 net.cpp:84] Creating Layer unpool1
I0715 16:53:43.956760  5838 net.cpp:406] unpool1 <- pool1
I0715 16:53:43.956775  5838 net.cpp:406] unpool1 <- pool1_mask
I0715 16:53:43.956789  5838 net.cpp:380] unpool1 -> unpool1
kernel_size:0has_kernel_h:0has_kernel_w:0
F0715 16:53:43.956820  5838 unpooling_layer.cpp:29] Check failed: !unpool_param.has_kernel_size() != !(unpool_param.has_kernel_h() && unpool_param.has_kernel_w()) Filter size is kernel_size OR kernel_h and kernel_w; not both
*** Check failure stack trace: ***
Aborted (core dumped)

Я вывожу значения kernel_size, has_kernel_h и has_kernel_w — все они равны 0.

Это мой файл deploy.prototxt.

name: "Deconv_test"input: "data"input_dim: 1
input_dim: 3
input_dim: 224
input_dim: 224
#data:224*224
layers
{
bottom: "data"top: "conv1_1"name: "conv1_1"type: CONVOLUTION
blobs_lr: 1 blobs_lr: 2
weight_decay: 1 weight_decay: 0
convolution_param {
num_output: 64
pad: 1
kernel_size: 3  }
}
#conv1_1
layers
{
bottom: 'conv1_1'
top: 'conv1_1'
name: 'bn1_1'
type: BN
bn_param {
scale_filler { type: 'constant' value: 1 }
shift_filler { type: 'constant' value: 0.001 }
bn_mode: INFERENCE}
}
layers {
bottom: "conv1_1"top: "conv1_1"name: "relu1_1"type: RELU}
# conv1_2
layers
{
bottom: "conv1_1"top: "conv1_2"name: "conv1_2"type: CONVOLUTION
blobs_lr: 1 blobs_lr: 2
weight_decay: 1 weight_decay: 0
convolution_param {
num_output: 64
pad: 1
kernel_size: 3 }
}
layers
{
bottom: 'conv1_2'
top: 'conv1_2'
name: 'bn1_2'
type: BN
bn_param {
scale_filler { type: 'constant' value: 1 }
shift_filler { type: 'constant' value: 0.001 }
bn_mode: INFERENCE }
}
layers {
bottom: "conv1_2"top: "conv1_2"name: "relu1_2"type: RELU}
# pool1
layers
{
bottom: "conv1_2"top: "pool1"top:"pool1_mask"name: "pool1"type: POOLING
pooling_param {
pool: MAX
kernel_size: 2
stride: 2  }
}
# unpool1
layers
{
type: UNPOOLING
bottom: "pool1"bottom: "pool1_mask"top: "unpool1"name: "unpool1"unpooling_param {
unpool: MAX
kernel_size: 2
stride: 2
unpool_size: 224
}
}
# deconv1_1
layers {
bottom: 'unpool1'
top: 'deconv1_1'
name: 'deconv1_1'
type: DECONVOLUTION
blobs_lr: 1 blobs_lr: 2
weight_decay: 1 weight_decay: 0
convolution_param {
num_output:64
pad:1
kernel_size: 3
weight_filler {      type: "gaussian"      std: 0.01    }
bias_filler {      type: "constant"      value: 0    }
}
}
layers
{
bottom: 'deconv1_1'
top: 'deconv1_1'
name: 'debn1_1'
type: BN
bn_param {
scale_filler { type: 'constant' value: 1 }
shift_filler { type: 'constant' value: 0.001 }
bn_mode: INFERENCE }
}
layers {
bottom: 'deconv1_1'
top: 'deconv1_1'
name: 'derelu1_1'
type: RELU
}
# deconv1_2
layers
{
bottom: 'deconv1_1'
top: 'deconv1_2'
name: 'deconv1_2'
type: DECONVOLUTION
blobs_lr: 1
blobs_lr: 2
weight_decay: 1
weight_decay: 0
convolution_param {
num_output:64
pad:1
kernel_size: 3
weight_filler {      type: "gaussian"      std: 0.01    }
bias_filler {      type: "constant"      value: 0    }
}
}
layers
{
bottom: 'deconv1_2'
top: 'deconv1_2'
name: 'debn1_2'
type: BN
bn_param { scale_filler { type: 'constant' value: 1 }
shift_filler { type: 'constant' value: 0.001 }
bn_mode: INFERENCE } }
layers {
bottom: 'deconv1_2'
top: 'deconv1_2'
name: 'derelu1_2'
type: RELU }
# seg-score
layers
{
name: 'seg-score-voc' type: CONVOLUTION bottom: 'deconv1_2' top: 'seg-score'
blobs_lr: 1 blobs_lr: 2 weight_decay: 1 weight_decay: 0
convolution_param {
num_output: 21 kernel_size: 1
weight_filler {
type: "gaussian"std: 0.01 }
bias_filler {
type: "constant"value: 0 }
}
}

Я давно ищу решение в сети, но безуспешно. Пожалуйста, помогите или подскажите какие-нибудь идеи, как решить эту проблему.

0

Решение

Вам нужно задать kernel_size для вашего слоя — kernel_size не может быть равен нулю.
Обратите внимание: в выводе лога видно, что файл был автоматически преобразован из устаревшего формата V1LayerParameter, и при этом параметры пользовательского слоя (unpooling_param) могли быть потеряны — поэтому слой получает нулевые значения, хотя в prototxt они заданы.
Caffe позволяет задать kernel_size двумя способами:

  1. С помощью kernel_size — один раз, чтобы использовать одно и то же значение для всех пространственных измерений, либо по одному значению на каждое пространственное измерение.
  2. Альтернативно, для двумерных данных можно указать kernel_h и kernel_w — высоту и ширину ядра соответственно.

Увидеть "Deconvolution" слой помощи для получения дополнительной информации.

0

Другие решения

Других решений пока нет …

По вопросам рекламы [email protected]