Error when testing MobileNet-SSD with demo.py: Check failed: target_blobs.size() == source_layer.blobs_size() (2 vs. 1)

This post describes how to adapt the MobileNetSSD_train.prototxt network configuration into a deploy version matching MobileNetSSD_deploy.prototxt. The key point is to modify the input and output layers while keeping the core structure unchanged; the resulting model is a stack of depthwise-separable convolution blocks with SSD detection heads, suitable for object detection.
  • The network configuration file used at test time is not the same as the one used for training, because the author provides them as two separate files.
  • Make a copy of MobileNetSSD_train.prototxt and, following MobileNetSSD_deploy.prototxt, change only the input and output; the rest stays the same.
  • The final content is listed below, with a short loading check after the prototxt.
name: "MobileNet-SSD"
input: "data"
input_shape {
  dim: 1
  dim: 3
  dim: 300
  dim: 300
}
layer {
  name: "conv0"
  type: "Convolution"
  bottom: "data"
  top: "conv0"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  convolution_param {
    num_output: 32
    bias_term: false
    pad: 1
    kernel_size: 3
    stride: 2
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv0/bn"
  type: "BatchNorm"
  bottom: "conv0"
  top: "conv0"
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
}
layer {
  name: "conv0/scale"
  type: "Scale"
  bottom: "conv0"
  top: "conv0"
  param {
    lr_mult: 0.1
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  scale_param {
    filler {
      value: 1
    }
    bias_term: true
    bias_filler {
      value: 0
    }
  }
}
layer {
  name: "conv0/relu"
  type: "ReLU"
  bottom: "conv0"
  top: "conv0"
}
layer {
  name: "conv1/dw"
  type: "Convolution"
  bottom: "conv0"
  top: "conv1/dw"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  convolution_param {
    num_output: 32
    bias_term: false
    pad: 1
    kernel_size: 3
    group: 32
    #engine: CAFFE
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv1/dw/bn"
  type: "BatchNorm"
  bottom: "conv1/dw"
  top: "conv1/dw"
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
}
layer {
  name: "conv1/dw/scale"
  type: "Scale"
  bottom: "conv1/dw"
  top: "conv1/dw"
  param {
    lr_mult: 0.1
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  scale_param {
    filler {
      value: 1
    }
    bias_term: true
    bias_filler {
      value: 0
    }
  }
}
layer {
  name: "conv1/dw/relu"
  type: "ReLU"
  bottom: "conv1/dw"
  top: "conv1/dw"
}
layer {
  name: "conv1"
  type: "Convolution"
  bottom: "conv1/dw"
  top: "conv1"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  convolution_param {
    num_output: 64
    bias_term: false
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv1/bn"
  type: "BatchNorm"
  bottom: "conv1"
  top: "conv1"
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
}
layer {
  name: "conv1/scale"
  type: "Scale"
  bottom: "conv1"
  top: "conv1"
  param {
    lr_mult: 0.1
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  scale_param {
    filler {
      value: 1
    }
    bias_term: true
    bias_filler {
      value: 0
    }
  }
}
layer {
  name: "conv1/relu"
  type: "ReLU"
  bottom: "conv1"
  top: "conv1"
}
layer {
  name: "conv2/dw"
  type: "Convolution"
  bottom: "conv1"
  top: "conv2/dw"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  convolution_param {
    num_output: 64
    bias_term: false
    pad: 1
    kernel_size: 3
    stride: 2
    group: 64
    #engine: CAFFE
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv2/dw/bn"
  type: "BatchNorm"
  bottom: "conv2/dw"
  top: "conv2/dw"
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
}
layer {
  name: "conv2/dw/scale"
  type: "Scale"
  bottom: "conv2/dw"
  top: "conv2/dw"
  param {
    lr_mult: 0.1
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  scale_param {
    filler {
      value: 1
    }
    bias_term: true
    bias_filler {
      value: 0
    }
  }
}
layer {
  name: "conv2/dw/relu"
  type: "ReLU"
  bottom: "conv2/dw"
  top: "conv2/dw"
}
layer {
  name: "conv2"
  type: "Convolution"
  bottom: "conv2/dw"
  top: "conv2"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  convolution_param {
    num_output: 128
    bias_term: false
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv2/bn"
  type: "BatchNorm"
  bottom: "conv2"
  top: "conv2"
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
}
layer {
  name: "conv2/scale"
  type: "Scale"
  bottom: "conv2"
  top: "conv2"
  param {
    lr_mult: 0.1
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  scale_param {
    filler {
      value: 1
    }
    bias_term: true
    bias_filler {
      value: 0
    }
  }
}
layer {
  name: "conv2/relu"
  type: "ReLU"
  bottom: "conv2"
  top: "conv2"
}
layer {
  name: "conv3/dw"
  type: "Convolution"
  bottom: "conv2"
  top: "conv3/dw"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  convolution_param {
    num_output: 128
    bias_term: false
    pad: 1
    kernel_size: 3
    group: 128
    #engine: CAFFE
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv3/dw/bn"
  type: "BatchNorm"
  bottom: "conv3/dw"
  top: "conv3/dw"
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
}
layer {
  name: "conv3/dw/scale"
  type: "Scale"
  bottom: "conv3/dw"
  top: "conv3/dw"
  param {
    lr_mult: 0.1
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  scale_param {
    filler {
      value: 1
    }
    bias_term: true
    bias_filler {
      value: 0
    }
  }
}
layer {
  name: "conv3/dw/relu"
  type: "ReLU"
  bottom: "conv3/dw"
  top: "conv3/dw"
}
layer {
  name: "conv3"
  type: "Convolution"
  bottom: "conv3/dw"
  top: "conv3"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  convolution_param {
    num_output: 128
    bias_term: false
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv3/bn"
  type: "BatchNorm"
  bottom: "conv3"
  top: "conv3"
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
}
layer {
  name: "conv3/scale"
  type: "Scale"
  bottom: "conv3"
  top: "conv3"
  param {
    lr_mult: 0.1
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  scale_param {
    filler {
      value: 1
    }
    bias_term: true
    bias_filler {
      value: 0
    }
  }
}
layer {
  name: "conv3/relu"
  type: "ReLU"
  bottom: "conv3"
  top: "conv3"
}
layer {
  name: "conv4/dw"
  type: "Convolution"
  bottom: "conv3"
  top: "conv4/dw"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  convolution_param {
    num_output: 128
    bias_term: false
    pad: 1
    kernel_size: 3
    stride: 2
    group: 128
    #engine: CAFFE
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv4/dw/bn"
  type: "BatchNorm"
  bottom: "conv4/dw"
  top: "conv4/dw"
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
}
layer {
  name: "conv4/dw/scale"
  type: "Scale"
  bottom: "conv4/dw"
  top: "conv4/dw"
  param {
    lr_mult: 0.1
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  scale_param {
    filler {
      value: 1
    }
    bias_term: true
    bias_filler {
      value: 0
    }
  }
}
layer {
  name: "conv4/dw/relu"
  type: "ReLU"
  bottom: "conv4/dw"
  top: "conv4/dw"
}
layer {
  name: "conv4"
  type: "Convolution"
  bottom: "conv4/dw"
  top: "conv4"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  convolution_param {
    num_output: 256
    bias_term: false
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv4/bn"
  type: "BatchNorm"
  bottom: "conv4"
  top: "conv4"
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
}
layer {
  name: "conv4/scale"
  type: "Scale"
  bottom: "conv4"
  top: "conv4"
  param {
    lr_mult: 0.1
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  scale_param {
    filler {
      value: 1
    }
    bias_term: true
    bias_filler {
      value: 0
    }
  }
}
layer {
  name: "conv4/relu"
  type: "ReLU"
  bottom: "conv4"
  top: "conv4"
}
layer {
  name: "conv5/dw"
  type: "Convolution"
  bottom: "conv4"
  top: "conv5/dw"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  convolution_param {
    num_output: 256
    bias_term: false
    pad: 1
    kernel_size: 3
    group: 256
    #engine: CAFFE
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv5/dw/bn"
  type: "BatchNorm"
  bottom: "conv5/dw"
  top: "conv5/dw"
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
}
layer {
  name: "conv5/dw/scale"
  type: "Scale"
  bottom: "conv5/dw"
  top: "conv5/dw"
  param {
    lr_mult: 0.1
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  scale_param {
    filler {
      value: 1
    }
    bias_term: true
    bias_filler {
      value: 0
    }
  }
}
layer {
  name: "conv5/dw/relu"
  type: "ReLU"
  bottom: "conv5/dw"
  top: "conv5/dw"
}
layer {
  name: "conv5"
  type: "Convolution"
  bottom: "conv5/dw"
  top: "conv5"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  convolution_param {
    num_output: 256
    bias_term: false
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv5/bn"
  type: "BatchNorm"
  bottom: "conv5"
  top: "conv5"
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
}
layer {
  name: "conv5/scale"
  type: "Scale"
  bottom: "conv5"
  top: "conv5"
  param {
    lr_mult: 0.1
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  scale_param {
    filler {
      value: 1
    }
    bias_term: true
    bias_filler {
      value: 0
    }
  }
}
layer {
  name: "conv5/relu"
  type: "ReLU"
  bottom: "conv5"
  top: "conv5"
}
layer {
  name: "conv6/dw"
  type: "Convolution"
  bottom: "conv5"
  top: "conv6/dw"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  convolution_param {
    num_output: 256
    bias_term: false
    pad: 1
    kernel_size: 3
    stride: 2
    group: 256
    #engine: CAFFE
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv6/dw/bn"
  type: "BatchNorm"
  bottom: "conv6/dw"
  top: "conv6/dw"
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
}
layer {
  name: "conv6/dw/scale"
  type: "Scale"
  bottom: "conv6/dw"
  top: "conv6/dw"
  param {
    lr_mult: 0.1
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  scale_param {
    filler {
      value: 1
    }
    bias_term: true
    bias_filler {
      value: 0
    }
  }
}
layer {
  name: "conv6/dw/relu"
  type: "ReLU"
  bottom: "conv6/dw"
  top: "conv6/dw"
}
layer {
  name: "conv6"
  type: "Convolution"
  bottom: "conv6/dw"
  top: "conv6"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  convolution_param {
    num_output: 512
    bias_term: false
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv6/bn"
  type: "BatchNorm"
  bottom: "conv6"
  top: "conv6"
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
}
layer {
  name: "conv6/scale"
  type: "Scale"
  bottom: "conv6"
  top: "conv6"
  param {
    lr_mult: 0.1
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  scale_param {
    filler {
      value: 1
    }
    bias_term: true
    bias_filler {
      value: 0
    }
  }
}
layer {
  name: "conv6/relu"
  type: "ReLU"
  bottom: "conv6"
  top: "conv6"
}
layer {
  name: "conv7/dw"
  type: "Convolution"
  bottom: "conv6"
  top: "conv7/dw"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  convolution_param {
    num_output: 512
    bias_term: false
    pad: 1
    kernel_size: 3
    group: 512
    #engine: CAFFE
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv7/dw/bn"
  type: "BatchNorm"
  bottom: "conv7/dw"
  top: "conv7/dw"
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
}
layer {
  name: "conv7/dw/scale"
  type: "Scale"
  bottom: "conv7/dw"
  top: "conv7/dw"
  param {
    lr_mult: 0.1
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  scale_param {
    filler {
      value: 1
    }
    bias_term: true
    bias_filler {
      value: 0
    }
  }
}
layer {
  name: "conv7/dw/relu"
  type: "ReLU"
  bottom: "conv7/dw"
  top: "conv7/dw"
}
layer {
  name: "conv7"
  type: "Convolution"
  bottom: "conv7/dw"
  top: "conv7"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  convolution_param {
    num_output: 512
    bias_term: false
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv7/bn"
  type: "BatchNorm"
  bottom: "conv7"
  top: "conv7"
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
}
layer {
  name: "conv7/scale"
  type: "Scale"
  bottom: "conv7"
  top: "conv7"
  param {
    lr_mult: 0.1
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  scale_param {
    filler {
      value: 1
    }
    bias_term: true
    bias_filler {
      value: 0
    }
  }
}
layer {
  name: "conv7/relu"
  type: "ReLU"
  bottom: "conv7"
  top: "conv7"
}
layer {
  name: "conv8/dw"
  type: "Convolution"
  bottom: "conv7"
  top: "conv8/dw"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  convolution_param {
    num_output: 512
    bias_term: false
    pad: 1
    kernel_size: 3
    group: 512
    #engine: CAFFE
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv8/dw/bn"
  type: "BatchNorm"
  bottom: "conv8/dw"
  top: "conv8/dw"
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
}
layer {
  name: "conv8/dw/scale"
  type: "Scale"
  bottom: "conv8/dw"
  top: "conv8/dw"
  param {
    lr_mult: 0.1
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  scale_param {
    filler {
      value: 1
    }
    bias_term: true
    bias_filler {
      value: 0
    }
  }
}
layer {
  name: "conv8/dw/relu"
  type: "ReLU"
  bottom: "conv8/dw"
  top: "conv8/dw"
}
layer {
  name: "conv8"
  type: "Convolution"
  bottom: "conv8/dw"
  top: "conv8"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  convolution_param {
    num_output: 512
    bias_term: false
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv8/bn"
  type: "BatchNorm"
  bottom: "conv8"
  top: "conv8"
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
}
layer {
  name: "conv8/scale"
  type: "Scale"
  bottom: "conv8"
  top: "conv8"
  param {
    lr_mult: 0.1
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  scale_param {
    filler {
      value: 1
    }
    bias_term: true
    bias_filler {
      value: 0
    }
  }
}
layer {
  name: "conv8/relu"
  type: "ReLU"
  bottom: "conv8"
  top: "conv8"
}
layer {
  name: "conv9/dw"
  type: "Convolution"
  bottom: "conv8"
  top: "conv9/dw"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  convolution_param {
    num_output: 512
    bias_term: false
    pad: 1
    kernel_size: 3
    group: 512
    #engine: CAFFE
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv9/dw/bn"
  type: "BatchNorm"
  bottom: "conv9/dw"
  top: "conv9/dw"
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
}
layer {
  name: "conv9/dw/scale"
  type: "Scale"
  bottom: "conv9/dw"
  top: "conv9/dw"
  param {
    lr_mult: 0.1
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  scale_param {
    filler {
      value: 1
    }
    bias_term: true
    bias_filler {
      value: 0
    }
  }
}
layer {
  name: "conv9/dw/relu"
  type: "ReLU"
  bottom: "conv9/dw"
  top: "conv9/dw"
}
layer {
  name: "conv9"
  type: "Convolution"
  bottom: "conv9/dw"
  top: "conv9"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  convolution_param {
    num_output: 512
    bias_term: false
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv9/bn"
  type: "BatchNorm"
  bottom: "conv9"
  top: "conv9"
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
}
layer {
  name: "conv9/scale"
  type: "Scale"
  bottom: "conv9"
  top: "conv9"
  param {
    lr_mult: 0.1
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  scale_param {
    filler {
      value: 1
    }
    bias_term: true
    bias_filler {
      value: 0
    }
  }
}
layer {
  name: "conv9/relu"
  type: "ReLU"
  bottom: "conv9"
  top: "conv9"
}
layer {
  name: "conv10/dw"
  type: "Convolution"
  bottom: "conv9"
  top: "conv10/dw"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  convolution_param {
    num_output: 512
    bias_term: false
    pad: 1
    kernel_size: 3
    group: 512
    #engine: CAFFE
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv10/dw/bn"
  type: "BatchNorm"
  bottom: "conv10/dw"
  top: "conv10/dw"
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
}
layer {
  name: "conv10/dw/scale"
  type: "Scale"
  bottom: "conv10/dw"
  top: "conv10/dw"
  param {
    lr_mult: 0.1
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  scale_param {
    filler {
      value: 1
    }
    bias_term: true
    bias_filler {
      value: 0
    }
  }
}
layer {
  name: "conv10/dw/relu"
  type: "ReLU"
  bottom: "conv10/dw"
  top: "conv10/dw"
}
layer {
  name: "conv10"
  type: "Convolution"
  bottom: "conv10/dw"
  top: "conv10"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  convolution_param {
    num_output: 512
    bias_term: false
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv10/bn"
  type: "BatchNorm"
  bottom: "conv10"
  top: "conv10"
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
}
layer {
  name: "conv10/scale"
  type: "Scale"
  bottom: "conv10"
  top: "conv10"
  param {
    lr_mult: 0.1
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  scale_param {
    filler {
      value: 1
    }
    bias_term: true
    bias_filler {
      value: 0
    }
  }
}
layer {
  name: "conv10/relu"
  type: "ReLU"
  bottom: "conv10"
  top: "conv10"
}
layer {
  name: "conv11/dw"
  type: "Convolution"
  bottom: "conv10"
  top: "conv11/dw"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  convolution_param {
    num_output: 512
    bias_term: false
    pad: 1
    kernel_size: 3
    group: 512
    #engine: CAFFE
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv11/dw/bn"
  type: "BatchNorm"
  bottom: "conv11/dw"
  top: "conv11/dw"
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
}
layer {
  name: "conv11/dw/scale"
  type: "Scale"
  bottom: "conv11/dw"
  top: "conv11/dw"
  param {
    lr_mult: 0.1
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  scale_param {
    filler {
      value: 1
    }
    bias_term: true
    bias_filler {
      value: 0
    }
  }
}
layer {
  name: "conv11/dw/relu"
  type: "ReLU"
  bottom: "conv11/dw"
  top: "conv11/dw"
}
layer {
  name: "conv11"
  type: "Convolution"
  bottom: "conv11/dw"
  top: "conv11"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  convolution_param {
    num_output: 512
    bias_term: false
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv11/bn"
  type: "BatchNorm"
  bottom: "conv11"
  top: "conv11"
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
}
layer {
  name: "conv11/scale"
  type: "Scale"
  bottom: "conv11"
  top: "conv11"
  param {
    lr_mult: 0.1
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  scale_param {
    filler {
      value: 1
    }
    bias_term: true
    bias_filler {
      value: 0
    }
  }
}
layer {
  name: "conv11/relu"
  type: "ReLU"
  bottom: "conv11"
  top: "conv11"
}
layer {
  name: "conv12/dw"
  type: "Convolution"
  bottom: "conv11"
  top: "conv12/dw"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  convolution_param {
    num_output: 512
    bias_term: false
    pad: 1
    kernel_size: 3
    stride: 2
    group: 512
    #engine: CAFFE
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv12/dw/bn"
  type: "BatchNorm"
  bottom: "conv12/dw"
  top: "conv12/dw"
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
}
layer {
  name: "conv12/dw/scale"
  type: "Scale"
  bottom: "conv12/dw"
  top: "conv12/dw"
  param {
    lr_mult: 0.1
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  scale_param {
    filler {
      value: 1
    }
    bias_term: true
    bias_filler {
      value: 0
    }
  }
}
layer {
  name: "conv12/dw/relu"
  type: "ReLU"
  bottom: "conv12/dw"
  top: "conv12/dw"
}
layer {
  name: "conv12"
  type: "Convolution"
  bottom: "conv12/dw"
  top: "conv12"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  convolution_param {
    num_output: 1024
    bias_term: false
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv12/bn"
  type: "BatchNorm"
  bottom: "conv12"
  top: "conv12"
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
}
layer {
  name: "conv12/scale"
  type: "Scale"
  bottom: "conv12"
  top: "conv12"
  param {
    lr_mult: 0.1
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  scale_param {
    filler {
      value: 1
    }
    bias_term: true
    bias_filler {
      value: 0
    }
  }
}
layer {
  name: "conv12/relu"
  type: "ReLU"
  bottom: "conv12"
  top: "conv12"
}
layer {
  name: "conv13/dw"
  type: "Convolution"
  bottom: "conv12"
  top: "conv13/dw"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  convolution_param {
    num_output: 1024
    bias_term: false
    pad: 1
    kernel_size: 3
    group: 1024
    #engine: CAFFE
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv13/dw/bn"
  type: "BatchNorm"
  bottom: "conv13/dw"
  top: "conv13/dw"
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
}
layer {
  name: "conv13/dw/scale"
  type: "Scale"
  bottom: "conv13/dw"
  top: "conv13/dw"
  param {
    lr_mult: 0.1
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  scale_param {
    filler {
      value: 1
    }
    bias_term: true
    bias_filler {
      value: 0
    }
  }
}
layer {
  name: "conv13/dw/relu"
  type: "ReLU"
  bottom: "conv13/dw"
  top: "conv13/dw"
}
layer {
  name: "conv13"
  type: "Convolution"
  bottom: "conv13/dw"
  top: "conv13"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  convolution_param {
    num_output: 1024
    bias_term: false
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv13/bn"
  type: "BatchNorm"
  bottom: "conv13"
  top: "conv13"
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
}
layer {
  name: "conv13/scale"
  type: "Scale"
  bottom: "conv13"
  top: "conv13"
  param {
    lr_mult: 0.1
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  scale_param {
    filler {
      value: 1
    }
    bias_term: true
    bias_filler {
      value: 0
    }
  }
}
layer {
  name: "conv13/relu"
  type: "ReLU"
  bottom: "conv13"
  top: "conv13"
}
layer {
  name: "conv14_1"
  type: "Convolution"
  bottom: "conv13"
  top: "conv14_1"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  convolution_param {
    num_output: 256
    bias_term: false
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv14_1/bn"
  type: "BatchNorm"
  bottom: "conv14_1"
  top: "conv14_1"
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
}
layer {
  name: "conv14_1/scale"
  type: "Scale"
  bottom: "conv14_1"
  top: "conv14_1"
  param {
    lr_mult: 0.1
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  scale_param {
    filler {
      value: 1
    }
    bias_term: true
    bias_filler {
      value: 0
    }
  }
}
layer {
  name: "conv14_1/relu"
  type: "ReLU"
  bottom: "conv14_1"
  top: "conv14_1"
}
layer {
  name: "conv14_2"
  type: "Convolution"
  bottom: "conv14_1"
  top: "conv14_2"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  convolution_param {
    num_output: 512
    bias_term: false
    pad: 1
    kernel_size: 3
    stride: 2
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv14_2/bn"
  type: "BatchNorm"
  bottom: "conv14_2"
  top: "conv14_2"
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
}
layer {
  name: "conv14_2/scale"
  type: "Scale"
  bottom: "conv14_2"
  top: "conv14_2"
  param {
    lr_mult: 0.1
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  scale_param {
    filler {
      value: 1
    }
    bias_term: true
    bias_filler {
      value: 0
    }
  }
}
layer {
  name: "conv14_2/relu"
  type: "ReLU"
  bottom: "conv14_2"
  top: "conv14_2"
}
layer {
  name: "conv15_1"
  type: "Convolution"
  bottom: "conv14_2"
  top: "conv15_1"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  convolution_param {
    num_output: 128
    bias_term: false
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv15_1/bn"
  type: "BatchNorm"
  bottom: "conv15_1"
  top: "conv15_1"
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
}
layer {
  name: "conv15_1/scale"
  type: "Scale"
  bottom: "conv15_1"
  top: "conv15_1"
  param {
    lr_mult: 0.1
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  scale_param {
    filler {
      value: 1
    }
    bias_term: true
    bias_filler {
      value: 0
    }
  }
}
layer {
  name: "conv15_1/relu"
  type: "ReLU"
  bottom: "conv15_1"
  top: "conv15_1"
}
layer {
  name: "conv15_2"
  type: "Convolution"
  bottom: "conv15_1"
  top: "conv15_2"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  convolution_param {
    num_output: 256
    bias_term: false
    pad: 1
    kernel_size: 3
    stride: 2
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv15_2/bn"
  type: "BatchNorm"
  bottom: "conv15_2"
  top: "conv15_2"
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
}
layer {
  name: "conv15_2/scale"
  type: "Scale"
  bottom: "conv15_2"
  top: "conv15_2"
  param {
    lr_mult: 0.1
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  scale_param {
    filler {
      value: 1
    }
    bias_term: true
    bias_filler {
      value: 0
    }
  }
}
layer {
  name: "conv15_2/relu"
  type: "ReLU"
  bottom: "conv15_2"
  top: "conv15_2"
}
layer {
  name: "conv16_1"
  type: "Convolution"
  bottom: "conv15_2"
  top: "conv16_1"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  convolution_param {
    num_output: 128
    bias_term: false
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv16_1/bn"
  type: "BatchNorm"
  bottom: "conv16_1"
  top: "conv16_1"
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
}
layer {
  name: "conv16_1/scale"
  type: "Scale"
  bottom: "conv16_1"
  top: "conv16_1"
  param {
    lr_mult: 0.1
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  scale_param {
    filler {
      value: 1
    }
    bias_term: true
    bias_filler {
      value: 0
    }
  }
}
layer {
  name: "conv16_1/relu"
  type: "ReLU"
  bottom: "conv16_1"
  top: "conv16_1"
}
layer {
  name: "conv16_2"
  type: "Convolution"
  bottom: "conv16_1"
  top: "conv16_2"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  convolution_param {
    num_output: 256
    bias_term: false
    pad: 1
    kernel_size: 3
    stride: 2
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv16_2/bn"
  type: "BatchNorm"
  bottom: "conv16_2"
  top: "conv16_2"
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
}
layer {
  name: "conv16_2/scale"
  type: "Scale"
  bottom: "conv16_2"
  top: "conv16_2"
  param {
    lr_mult: 0.1
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  scale_param {
    filler {
      value: 1
    }
    bias_term: true
    bias_filler {
      value: 0
    }
  }
}
layer {
  name: "conv16_2/relu"
  type: "ReLU"
  bottom: "conv16_2"
  top: "conv16_2"
}
layer {
  name: "conv17_1"
  type: "Convolution"
  bottom: "conv16_2"
  top: "conv17_1"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  convolution_param {
    num_output: 64
    bias_term: false
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv17_1/bn"
  type: "BatchNorm"
  bottom: "conv17_1"
  top: "conv17_1"
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
}
layer {
  name: "conv17_1/scale"
  type: "Scale"
  bottom: "conv17_1"
  top: "conv17_1"
  param {
    lr_mult: 0.1
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  scale_param {
    filler {
      value: 1
    }
    bias_term: true
    bias_filler {
      value: 0
    }
  }
}
layer {
  name: "conv17_1/relu"
  type: "ReLU"
  bottom: "conv17_1"
  top: "conv17_1"
}
layer {
  name: "conv17_2"
  type: "Convolution"
  bottom: "conv17_1"
  top: "conv17_2"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  convolution_param {
    num_output: 128
    bias_term: false
    pad: 1
    kernel_size: 3
    stride: 2
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv17_2/bn"
  type: "BatchNorm"
  bottom: "conv17_2"
  top: "conv17_2"
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
}
layer {
  name: "conv17_2/scale"
  type: "Scale"
  bottom: "conv17_2"
  top: "conv17_2"
  param {
    lr_mult: 0.1
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  scale_param {
    filler {
      value: 1
    }
    bias_term: true
    bias_filler {
      value: 0
    }
  }
}
layer {
  name: "conv17_2/relu"
  type: "ReLU"
  bottom: "conv17_2"
  top: "conv17_2"
}
layer {
  name: "conv11_mbox_loc"
  type: "Convolution"
  bottom: "conv11"
  top: "conv11_mbox_loc"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  convolution_param {
    num_output: 12
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
    bias_filler {
      type: "constant"
      value: 0.0
    }
  }
}
layer {
  name: "conv11_mbox_loc_perm"
  type: "Permute"
  bottom: "conv11_mbox_loc"
  top: "conv11_mbox_loc_perm"
  permute_param {
    order: 0
    order: 2
    order: 3
    order: 1
  }
}
layer {
  name: "conv11_mbox_loc_flat"
  type: "Flatten"
  bottom: "conv11_mbox_loc_perm"
  top: "conv11_mbox_loc_flat"
  flatten_param {
    axis: 1
  }
}
layer {
  name: "conv11_mbox_conf"
  type: "Convolution"
  bottom: "conv11"
  top: "conv11_mbox_conf"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  param {
    lr_mult: 2.0
    decay_mult: 0.0
  }
  convolution_param {
    num_output: 63
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
    bias_filler {
      type: "constant"
      value: 0.0
    }
  }
}
layer {
  name: "conv11_mbox_conf_perm"
  type: "Permute"
  bottom: "conv11_mbox_conf"
  top: "conv11_mbox_conf_perm"
  permute_param {
    order: 0
    order: 2
    order: 3
    order: 1
  }
}
layer {
  name: "conv11_mbox_conf_flat"
  type: "Flatten"
  bottom: "conv11_mbox_conf_perm"
  top: "conv11_mbox_conf_flat"
  flatten_param {
    axis: 1
  }
}
layer {
  name: "conv11_mbox_priorbox"
  type: "PriorBox"
  bottom: "conv11"
  bottom: "data"
  top: "conv11_mbox_priorbox"
  prior_box_param {
    min_size: 60.0
    aspect_ratio: 2.0
    flip: true
    clip: false
    variance: 0.1
    variance: 0.1
    variance: 0.2
    variance: 0.2
    offset: 0.5
  }
}
layer {
  name: "conv13_mbox_loc"
  type: "Convolution"
  bottom: "conv13"
  top: "conv13_mbox_loc"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  convolution_param {
    num_output: 24
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
    bias_filler {
      type: "constant"
      value: 0.0
    }
  }
}
layer {
  name: "conv13_mbox_loc_perm"
  type: "Permute"
  bottom: "conv13_mbox_loc"
  top: "conv13_mbox_loc_perm"
  permute_param {
    order: 0
    order: 2
    order: 3
    order: 1
  }
}
layer {
  name: "conv13_mbox_loc_flat"
  type: "Flatten"
  bottom: "conv13_mbox_loc_perm"
  top: "conv13_mbox_loc_flat"
  flatten_param {
    axis: 1
  }
}
layer {
  name: "conv13_mbox_conf"
  type: "Convolution"
  bottom: "conv13"
  top: "conv13_mbox_conf"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  param {
    lr_mult: 2.0
    decay_mult: 0.0
  }
  convolution_param {
    num_output: 126
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
    bias_filler {
      type: "constant"
      value: 0.0
    }
  }
}
layer {
  name: "conv13_mbox_conf_perm"
  type: "Permute"
  bottom: "conv13_mbox_conf"
  top: "conv13_mbox_conf_perm"
  permute_param {
    order: 0
    order: 2
    order: 3
    order: 1
  }
}
layer {
  name: "conv13_mbox_conf_flat"
  type: "Flatten"
  bottom: "conv13_mbox_conf_perm"
  top: "conv13_mbox_conf_flat"
  flatten_param {
    axis: 1
  }
}
layer {
  name: "conv13_mbox_priorbox"
  type: "PriorBox"
  bottom: "conv13"
  bottom: "data"
  top: "conv13_mbox_priorbox"
  prior_box_param {
    min_size: 105.0
    max_size: 150.0
    aspect_ratio: 2.0
    aspect_ratio: 3.0
    flip: true
    clip: false
    variance: 0.1
    variance: 0.1
    variance: 0.2
    variance: 0.2
    offset: 0.5
  }
}
layer {
  name: "conv14_2_mbox_loc"
  type: "Convolution"
  bottom: "conv14_2"
  top: "conv14_2_mbox_loc"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  convolution_param {
    num_output: 24
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
    bias_filler {
      type: "constant"
      value: 0.0
    }
  }
}
layer {
  name: "conv14_2_mbox_loc_perm"
  type: "Permute"
  bottom: "conv14_2_mbox_loc"
  top: "conv14_2_mbox_loc_perm"
  permute_param {
    order: 0
    order: 2
    order: 3
    order: 1
  }
}
layer {
  name: "conv14_2_mbox_loc_flat"
  type: "Flatten"
  bottom: "conv14_2_mbox_loc_perm"
  top: "conv14_2_mbox_loc_flat"
  flatten_param {
    axis: 1
  }
}
layer {
  name: "conv14_2_mbox_conf"
  type: "Convolution"
  bottom: "conv14_2"
  top: "conv14_2_mbox_conf"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  param {
    lr_mult: 2.0
    decay_mult: 0.0
  }
  convolution_param {
    num_output: 126
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
    bias_filler {
      type: "constant"
      value: 0.0
    }
  }
}
layer {
  name: "conv14_2_mbox_conf_perm"
  type: "Permute"
  bottom: "conv14_2_mbox_conf"
  top: "conv14_2_mbox_conf_perm"
  permute_param {
    order: 0
    order: 2
    order: 3
    order: 1
  }
}
layer {
  name: "conv14_2_mbox_conf_flat"
  type: "Flatten"
  bottom: "conv14_2_mbox_conf_perm"
  top: "conv14_2_mbox_conf_flat"
  flatten_param {
    axis: 1
  }
}
layer {
  name: "conv14_2_mbox_priorbox"
  type: "PriorBox"
  bottom: "conv14_2"
  bottom: "data"
  top: "conv14_2_mbox_priorbox"
  prior_box_param {
    min_size: 150.0
    max_size: 195.0
    aspect_ratio: 2.0
    aspect_ratio: 3.0
    flip: true
    clip: false
    variance: 0.1
    variance: 0.1
    variance: 0.2
    variance: 0.2
    offset: 0.5
  }
}
layer {
  name: "conv15_2_mbox_loc"
  type: "Convolution"
  bottom: "conv15_2"
  top: "conv15_2_mbox_loc"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  convolution_param {
    num_output: 24
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
    bias_filler {
      type: "constant"
      value: 0.0
    }
  }
}
layer {
  name: "conv15_2_mbox_loc_perm"
  type: "Permute"
  bottom: "conv15_2_mbox_loc"
  top: "conv15_2_mbox_loc_perm"
  permute_param {
    order: 0
    order: 2
    order: 3
    order: 1
  }
}
layer {
  name: "conv15_2_mbox_loc_flat"
  type: "Flatten"
  bottom: "conv15_2_mbox_loc_perm"
  top: "conv15_2_mbox_loc_flat"
  flatten_param {
    axis: 1
  }
}
layer {
  name: "conv15_2_mbox_conf"
  type: "Convolution"
  bottom: "conv15_2"
  top: "conv15_2_mbox_conf"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  param {
    lr_mult: 2.0
    decay_mult: 0.0
  }
  convolution_param {
    num_output: 126
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
    bias_filler {
      type: "constant"
      value: 0.0
    }
  }
}
layer {
  name: "conv15_2_mbox_conf_perm"
  type: "Permute"
  bottom: "conv15_2_mbox_conf"
  top: "conv15_2_mbox_conf_perm"
  permute_param {
    order: 0
    order: 2
    order: 3
    order: 1
  }
}
layer {
  name: "conv15_2_mbox_conf_flat"
  type: "Flatten"
  bottom: "conv15_2_mbox_conf_perm"
  top: "conv15_2_mbox_conf_flat"
  flatten_param {
    axis: 1
  }
}
layer {
  name: "conv15_2_mbox_priorbox"
  type: "PriorBox"
  bottom: "conv15_2"
  bottom: "data"
  top: "conv15_2_mbox_priorbox"
  prior_box_param {
    min_size: 195.0
    max_size: 240.0
    aspect_ratio: 2.0
    aspect_ratio: 3.0
    flip: true
    clip: false
    variance: 0.1
    variance: 0.1
    variance: 0.2
    variance: 0.2
    offset: 0.5
  }
}
layer {
  name: "conv16_2_mbox_loc"
  type: "Convolution"
  bottom: "conv16_2"
  top: "conv16_2_mbox_loc"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  convolution_param {
    num_output: 24
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
    bias_filler {
      type: "constant"
      value: 0.0
    }
  }
}
layer {
  name: "conv16_2_mbox_loc_perm"
  type: "Permute"
  bottom: "conv16_2_mbox_loc"
  top: "conv16_2_mbox_loc_perm"
  permute_param {
    order: 0
    order: 2
    order: 3
    order: 1
  }
}
layer {
  name: "conv16_2_mbox_loc_flat"
  type: "Flatten"
  bottom: "conv16_2_mbox_loc_perm"
  top: "conv16_2_mbox_loc_flat"
  flatten_param {
    axis: 1
  }
}
layer {
  name: "conv16_2_mbox_conf"
  type: "Convolution"
  bottom: "conv16_2"
  top: "conv16_2_mbox_conf"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  param {
    lr_mult: 2.0
    decay_mult: 0.0
  }
  convolution_param {
    num_output: 126
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
    bias_filler {
      type: "constant"
      value: 0.0
    }
  }
}
layer {
  name: "conv16_2_mbox_conf_perm"
  type: "Permute"
  bottom: "conv16_2_mbox_conf"
  top: "conv16_2_mbox_conf_perm"
  permute_param {
    order: 0
    order: 2
    order: 3
    order: 1
  }
}
layer {
  name: "conv16_2_mbox_conf_flat"
  type: "Flatten"
  bottom: "conv16_2_mbox_conf_perm"
  top: "conv16_2_mbox_conf_flat"
  flatten_param {
    axis: 1
  }
}
layer {
  name: "conv16_2_mbox_priorbox"
  type: "PriorBox"
  bottom: "conv16_2"
  bottom: "data"
  top: "conv16_2_mbox_priorbox"
  prior_box_param {
    min_size: 240.0
    max_size: 285.0
    aspect_ratio: 2.0
    aspect_ratio: 3.0
    flip: true
    clip: false
    variance: 0.1
    variance: 0.1
    variance: 0.2
    variance: 0.2
    offset: 0.5
  }
}
layer {
  name: "conv17_2_mbox_loc"
  type: "Convolution"
  bottom: "conv17_2"
  top: "conv17_2_mbox_loc"
  param {
    lr_mult: 0.1
    decay_mult: 0.1
  }
  param {
    lr_mult: 0.2
    decay_mult: 0.0
  }
  convolution_param {
    num_output: 24
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
    bias_filler {
      type: "constant"
      value: 0.0
    }
  }
}
layer {
  name: "conv17_2_mbox_loc_perm"
  type: "Permute"
  bottom: "conv17_2_mbox_loc"
  top: "conv17_2_mbox_loc_perm"
  permute_param {
    order: 0
    order: 2
    order: 3
    order: 1
  }
}
layer {
  name: "conv17_2_mbox_loc_flat"
  type: "Flatten"
  bottom: "conv17_2_mbox_loc_perm"
  top: "conv17_2_mbox_loc_flat"
  flatten_param {
    axis: 1
  }
}
layer {
  name: "conv17_2_mbox_conf"
  type: "Convolution"
  bottom: "conv17_2"
  top: "conv17_2_mbox_conf"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  param {
    lr_mult: 2.0
    decay_mult: 0.0
  }
  convolution_param {
    num_output: 126
    kernel_size: 1
    weight_filler {
      type: "msra"
    }
    bias_filler {
      type: "constant"
      value: 0.0
    }
  }
}
layer {
  name: "conv17_2_mbox_conf_perm"
  type: "Permute"
  bottom: "conv17_2_mbox_conf"
  top: "conv17_2_mbox_conf_perm"
  permute_param {
    order: 0
    order: 2
    order: 3
    order: 1
  }
}
layer {
  name: "conv17_2_mbox_conf_flat"
  type: "Flatten"
  bottom: "conv17_2_mbox_conf_perm"
  top: "conv17_2_mbox_conf_flat"
  flatten_param {
    axis: 1
  }
}
layer {
  name: "conv17_2_mbox_priorbox"
  type: "PriorBox"
  bottom: "conv17_2"
  bottom: "data"
  top: "conv17_2_mbox_priorbox"
  prior_box_param {
    min_size: 285.0
    max_size: 300.0
    aspect_ratio: 2.0
    aspect_ratio: 3.0
    flip: true
    clip: false
    variance: 0.1
    variance: 0.1
    variance: 0.2
    variance: 0.2
    offset: 0.5
  }
}
layer {
  name: "mbox_loc"
  type: "Concat"
  bottom: "conv11_mbox_loc_flat"
  bottom: "conv13_mbox_loc_flat"
  bottom: "conv14_2_mbox_loc_flat"
  bottom: "conv15_2_mbox_loc_flat"
  bottom: "conv16_2_mbox_loc_flat"
  bottom: "conv17_2_mbox_loc_flat"
  top: "mbox_loc"
  concat_param {
    axis: 1
  }
}
layer {
  name: "mbox_conf"
  type: "Concat"
  bottom: "conv11_mbox_conf_flat"
  bottom: "conv13_mbox_conf_flat"
  bottom: "conv14_2_mbox_conf_flat"
  bottom: "conv15_2_mbox_conf_flat"
  bottom: "conv16_2_mbox_conf_flat"
  bottom: "conv17_2_mbox_conf_flat"
  top: "mbox_conf"
  concat_param {
    axis: 1
  }
}
layer {
  name: "mbox_priorbox"
  type: "Concat"
  bottom: "conv11_mbox_priorbox"
  bottom: "conv13_mbox_priorbox"
  bottom: "conv14_2_mbox_priorbox"
  bottom: "conv15_2_mbox_priorbox"
  bottom: "conv16_2_mbox_priorbox"
  bottom: "conv17_2_mbox_priorbox"
  top: "mbox_priorbox"
  concat_param {
    axis: 2
  }
}
layer {
  name: "mbox_conf_reshape"
  type: "Reshape"
  bottom: "mbox_conf"
  top: "mbox_conf_reshape"
  reshape_param {
    shape {
      dim: 0
      dim: -1
      dim: 21
    }
  }
}
layer {
  name: "mbox_conf_softmax"
  type: "Softmax"
  bottom: "mbox_conf_reshape"
  top: "mbox_conf_softmax"
  softmax_param {
    axis: 2
  }
}
layer {
  name: "mbox_conf_flatten"
  type: "Flatten"
  bottom: "mbox_conf_softmax"
  top: "mbox_conf_flatten"
  flatten_param {
    axis: 1
  }
}
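# Deploy-style output: the DetectionOutput head below follows
# MobileNetSSD_deploy.prototxt and is the "output" change mentioned above,
# standing in for the training-time output layers.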
layer {
  name: "detection_out"
  type: "DetectionOutput"
  bottom: "mbox_loc"
  bottom: "mbox_conf_flatten"
  bottom: "mbox_priorbox"
  top: "detection_out"
  include {
    phase: TEST
  }
  detection_output_param {
    num_classes: 21
    share_location: true
    background_label_id: 0
    nms_param {
      nms_threshold: 0.45
      top_k: 100
    }
    code_type: CENTER_SIZE
    keep_top_k: 100
    confidence_threshold: 0.25
  }
}
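To confirm the fix, the modified prototxt can be loaded together with the trained weights and run through a forward pass, which is essentially what demo.py does before drawing boxes. Below is a minimal sketch using pycaffe; the file names are placeholders, so substitute your own prototxt and caffemodel.

# Minimal check: the modified deploy prototxt should now load the trained
# caffemodel without tripping the
# "Check failed: target_blobs.size() == source_layer.blobs_size()" assertion.
import numpy as np
import caffe

caffe.set_mode_cpu()  # switch to caffe.set_mode_gpu() if a GPU build is available

# Placeholder file names -- point these at your own files.
net = caffe.Net('MobileNetSSD_deploy_from_train.prototxt',
                'your_trained.caffemodel',
                caffe.TEST)

# Feed a dummy 1x3x300x300 blob, matching the input_shape declared above.
net.blobs['data'].reshape(1, 3, 300, 300)
net.blobs['data'].data[...] = np.random.rand(1, 3, 300, 300).astype(np.float32)

# 'detection_out' is the top of the DetectionOutput layer; each row is
# [image_id, label, confidence, xmin, ymin, xmax, ymax] in normalized coordinates.
out = net.forward()['detection_out']
print(out.shape)

If the blob-count check still fails, the Caffe log should also name the offending layer ("Incompatible number of blobs for layer ..."); compare that layer's definition against the one in the training prototxt.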