日志打印 (Training log output — verbatim Caffe console log of a VGG16 fine-tuning run; note the net file path "train_val _1.prototxt" contains a space, which is the literal path used by this run)

I0222 19:22:12.178056 10448 caffe.cpp:218] Using GPUs 0
I0222 19:22:12.236059 10448 caffe.cpp:223] GPU 0: GeForce GTX 1080
I0222 19:22:12.749089 10448 common.cpp:36] System entropy source not available, using fallback algorithm to generate seed instead.
I0222 19:22:12.749089 10448 solver.cpp:48] Initializing solver from parameters: 
test_iter: 500
test_interval: 500
base_lr: 1e-005
display: 1000
max_iter: 200000
lr_policy: "step"
gamma: 0.1
momentum: 0.9
weight_decay: 0.0005
stepsize: 50000
snapshot: 10000
snapshot_prefix: "E:/CAFFE/caffe-master/project/vgg16/caffe_vgg16_train"
solver_mode: GPU
device_id: 0
net: "E:/CAFFE/caffe-master/project/vgg16/train_val _1.prototxt"
train_state {
  level: 0
  stage: ""
}
I0222 19:22:12.750088 10448 solver.cpp:91] Creating training net from net file: E:/CAFFE/caffe-master/project/vgg16/train_val _1.prototxt
I0222 19:22:12.751088 10448 net.cpp:332] The NetState phase (0) differed from the phase (1) specified by a rule in layer data
I0222 19:22:12.751088 10448 net.cpp:332] The NetState phase (0) differed from the phase (1) specified by a rule in layer accuracy_at_1
I0222 19:22:12.751088 10448 net.cpp:58] Initializing net from parameters: 
name: "VGG16"
state {
  phase: TRAIN
  level: 0
  stage: ""
}
layer {
  name: "data"
  type: "Data"
  top: "data"
  top: "label"
  include {
    phase: TRAIN
  }
  transform_param {
    mirror: true
    crop_size: 224
    mean_value: 103.939
    mean_value: 116.779
    mean_value: 123.68
  }
  data_param {
    source: "E:/CAFFE/caffe-master/project/vgg16/traindb"
    batch_size: 30
    backend: LMDB
  }
}
layer {
  name: "conv1_1"
  type: "Convolution"
  bottom: "data"
  top: "conv1_1"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 64
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu1_1"
  type: "ReLU"
  bottom: "conv1_1"
  top: "conv1_1"
}
layer {
  name: "conv1_2"
  type: "Convolution"
  bottom: "conv1_1"
  top: "conv1_2"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 64
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu1_2"
  type: "ReLU"
  bottom: "conv1_2"
  top: "conv1_2"
}
layer {
  name: "pool1"
  type: "Pooling"
  bottom: "conv1_2"
  top: "pool1"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
layer {
  name: "conv2_1"
  type: "Convolution"
  bottom: "pool1"
  top: "conv2_1"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 128
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu2_1"
  type: "ReLU"
  bottom: "conv2_1"
  top: "conv2_1"
}
layer {
  name: "conv2_2"
  type: "Convolution"
  bottom: "conv2_1"
  top: "conv2_2"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 128
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu2_2"
  type: "ReLU"
  bottom: "conv2_2"
  top: "conv2_2"
}
layer {
  name: "pool2"
  type: "Pooling"
  bottom: "conv2_2"
  top: "pool2"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
layer {
  name: "conv3_1"
  type: "Convolution"
  bottom: "pool2"
  top: "conv3_1"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu3_1"
  type: "ReLU"
  bottom: "conv3_1"
  top: "conv3_1"
}
layer {
  name: "conv3_2"
  type: "Convolution"
  bottom: "conv3_1"
  top: "conv3_2"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu3_2"
  type: "ReLU"
  bottom: "conv3_2"
  top: "conv3_2"
}
layer {
  name: "conv3_3"
  type: "Convolution"
  bottom: "conv3_2"
  top: "conv3_3"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu3_3"
  type: "ReLU"
  bottom: "conv3_3"
  top: "conv3_3"
}
layer {
  name: "pool3"
  type: "Pooling"
  bottom: "conv3_3"
  top: "pool3"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
layer {
  name: "conv4_1"
  type: "Convolution"
  bottom: "pool3"
  top: "conv4_1"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu4_1"
  type: "ReLU"
  bottom: "conv4_1"
  top: "conv4_1"
}
layer {
  name: "conv4_2"
  type: "Convolution"
  bottom: "conv4_1"
  top: "conv4_2"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu4_2"
  type: "ReLU"
  bottom: "conv4_2"
  top: "conv4_2"
}
layer {
  name: "conv4_3"
  type: "Convolution"
  bottom: "conv4_2"
  top: "conv4_3"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu4_3"
  type: "ReLU"
  bottom: "conv4_3"
  top: "conv4_3"
}
layer {
  name: "pool4"
  type: "Pooling"
  bottom: "conv4_3"
  top: "pool4"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
layer {
  name: "conv5_1"
  type: "Convolution"
  bottom: "pool4"
  top: "conv5_1"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu5_1"
  type: "ReLU"
  bottom: "conv5_1"
  top: "conv5_1"
}
layer {
  name: "conv5_2"
  type: "Convolution"
  bottom: "conv5_1"
  top: "conv5_2"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu5_2"
  type: "ReLU"
  bottom: "conv5_2"
  top: "conv5_2"
}
layer {
  name: "conv5_3"
  type: "Convolution"
  bottom: "conv5_2"
  top: "conv5_3"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu5_3"
  type: "ReLU"
  bottom: "conv5_3"
  top: "conv5_3"
}
layer {
  name: "pool5"
  type: "Pooling"
  bottom: "conv5_3"
  top: "pool5"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
layer {
  name: "fc6"
  type: "InnerProduct"
  bottom: "pool5"
  top: "fc6"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  inner_product_param {
    num_output: 4096
    weight_filler {
      type: "gaussian"
      std: 0.005
    }
    bias_filler {
      type: "constant"
      value: 0.1
    }
  }
}
layer {
  name: "relu6"
  type: "ReLU"
  bottom: "fc6"
  top: "fc6"
}
layer {
  name: "drop6"
  type: "Dropout"
  bottom: "fc6"
  top: "fc6"
  dropout_param {
    dropout_ratio: 0.5
  }
}
layer {
  name: "fc7"
  type: "InnerProduct"
  bottom: "fc6"
  top: "fc7"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  inner_product_param {
    num_output: 4096
    weight_filler {
      type: "gaussian"
      std: 0.005
    }
    bias_filler {
      type: "constant"
      value: 0.1
    }
  }
}
layer {
  name: "relu7"
  type: "ReLU"
  bottom: "fc7"
  top: "fc7"
}
layer {
  name: "drop7"
  type: "Dropout"
  bottom: "fc7"
  top: "fc7"
  dropout_param {
    dropout_ratio: 0.5
  }
}
layer {
  name: "fc8_my"
  type: "InnerProduct"
  bottom: "fc7"
  top: "fc8_my"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  inner_product_param {
    num_output: 2
    weight_filler {
      type: "gaussian"
      std: 0.005
    }
    bias_filler {
      type: "constant"
      value: 0.1
    }
  }
}
layer {
  name: "loss"
  type: "SoftmaxWithLoss"
  bottom: "fc8_my"
  bottom: "label"
  top: "loss"
}
I0222 19:22:12.751088 10448 layer_factory.hpp:77] Creating layer data
I0222 19:22:12.751088 10448 common.cpp:36] System entropy source not available, using fallback algorithm to generate seed instead.
I0222 19:22:12.752089 10448 net.cpp:100] Creating Layer data
I0222 19:22:12.752089 10448 net.cpp:418] data -> data
I0222 19:22:12.752089 10448 net.cpp:418] data -> label
I0222 19:22:12.754088 14944 common.cpp:36] System entropy source not available, using fallback algorithm to generate seed instead.
I0222 19:22:12.782090 14944 db_lmdb.cpp:40] Opened lmdb E:/CAFFE/caffe-master/project/vgg16/traindb
I0222 19:22:12.926098 10448 data_layer.cpp:41] output data size: 30,3,224,224
I0222 19:22:12.984102 10448 net.cpp:150] Setting up data
I0222 19:22:12.984102 10448 net.cpp:157] Top shape: 30 3 224 224 (4515840)
I0222 19:22:12.984102 10448 net.cpp:157] Top shape: 30 (30)
I0222 19:22:12.984102 10448 net.cpp:165] Memory required for data: 18063480
I0222 19:22:12.985101 10448 layer_factory.hpp:77] Creating layer conv1_1
I0222 19:22:12.985101 10448 net.cpp:100] Creating Layer conv1_1
I0222 19:22:12.985101 10448 net.cpp:444] conv1_1 <- data
I0222 19:22:12.985101 10448 net.cpp:418] conv1_1 -> conv1_1
I0222 19:22:12.986102 13944 common.cpp:36] System entropy source not available, using fallback algorithm to generate seed instead.
I0222 19:22:13.439127 10448 net.cpp:150] Setting up conv1_1
I0222 19:22:13.439127 10448 net.cpp:157] Top shape: 30 64 224 224 (96337920)
I0222 19:22:13.439127 10448 net.cpp:165] Memory required for data: 403415160
I0222 19:22:13.439127 10448 layer_factory.hpp:77] Creating layer relu1_1
I0222 19:22:13.439127 10448 net.cpp:100] Creating Layer relu1_1
I0222 19:22:13.439127 10448 net.cpp:444] relu1_1 <- conv1_1
I0222 19:22:13.439127 10448 net.cpp:405] relu1_1 -> conv1_1 (in-place)
I0222 19:22:13.440127 10448 net.cpp:150] Setting up relu1_1
I0222 19:22:13.440127 10448 net.cpp:157] Top shape: 30 64 224 224 (96337920)
I0222 19:22:13.440127 10448 net.cpp:165] Memory required for data: 788766840
I0222 19:22:13.440127 10448 layer_factory.hpp:77] Creating layer conv1_2
I0222 19:22:13.440127 10448 net.cpp:100] Creating Layer conv1_2
I0222 19:22:13.440127 10448 net.cpp:444] conv1_2 <- conv1_1
I0222 19:22:13.440127 10448 net.cpp:418] conv1_2 -> conv1_2
I0222 19:22:13.442128 10448 net.cpp:150] Setting up conv1_2
I0222 19:22:13.442128 10448 net.cpp:157] Top shape: 30 64 224 224 (96337920)
I0222 19:22:13.442128 10448 net.cpp:165] Memory required for data: 1174118520
I0222 19:22:13.442128 10448 layer_factory.hpp:77] Creating layer relu1_2
I0222 19:22:13.442128 10448 net.cpp:100] Creating Layer relu1_2
I0222 19:22:13.442128 10448 net.cpp:444] relu1_2 <- conv1_2
I0222 19:22:13.442128 10448 net.cpp:405] relu1_2 -> conv1_2 (in-place)
I0222 19:22:13.443128 10448 net.cpp:150] Setting up relu1_2
I0222 19:22:13.443128 10448 net.cpp:157] Top shape: 30 64 224 224 (96337920)
I0222 19:22:13.443128 10448 net.cpp:165] Memory required for data: 1559470200
I0222 19:22:13.443128 10448 layer_factory.hpp:77] Creating layer pool1
I0222 19:22:13.443128 10448 net.cpp:100] Creating Layer pool1
I0222 19:22:13.443128 10448 net.cpp:444] pool1 <- conv1_2
I0222 19:22:13.443128 10448 net.cpp:418] pool1 -> pool1
I0222 19:22:13.444128 10448 net.cpp:150] Setting up pool1
I0222 19:22:13.444128 10448 net.cpp:157] Top shape: 30 64 112 112 (24084480)
I0222 19:22:13.444128 10448 net.cpp:165] Memory required for data: 1655808120
I0222 19:22:13.444128 10448 layer_factory.hpp:77] Creating layer conv2_1
I0222 19:22:13.444128 10448 net.cpp:100] Creating Layer conv2_1
I0222 19:22:13.444128 10448 net.cpp:444] conv2_1 <- pool1
I0222 19:22:13.444128 10448 net.cpp:418] conv2_1 -> conv2_1
I0222 19:22:13.448128 10448 net.cpp:150] Setting up conv2_1
I0222 19:22:13.448128 10448 net.cpp:157] Top shape: 30 128 112 112 (48168960)
I0222 19:22:13.448128 10448 net.cpp:165] Memory required for data: 1848483960
I0222 19:22:13.448128 10448 layer_factory.hpp:77] Creating layer relu2_1
I0222 19:22:13.448128 10448 net.cpp:100] Creating Layer relu2_1
I0222 19:22:13.448128 10448 net.cpp:444] relu2_1 <- conv2_1
I0222 19:22:13.448128 10448 net.cpp:405] relu2_1 -> conv2_1 (in-place)
I0222 19:22:13.449128 10448 net.cpp:150] Setting up relu2_1
I0222 19:22:13.449128 10448 net.cpp:157] Top shape: 30 128 112 112 (48168960)
I0222 19:22:13.449128 10448 net.cpp:165] Memory required for data: 2041159800
I0222 19:22:13.449128 10448 layer_factory.hpp:77] Creating layer conv2_2
I0222 19:22:13.449128 10448 net.cpp:100] Creating Layer conv2_2
I0222 19:22:13.449128 10448 net.cpp:444] conv2_2 <- conv2_1
I0222 19:22:13.449128 10448 net.cpp:418] conv2_2 -> conv2_2
I0222 19:22:13.453128 10448 net.cpp:150] Setting up conv2_2
I0222 19:22:13.453128 10448 net.cpp:157] Top shape: 30 128 112 112 (48168960)
I0222 19:22:13.453128 10448 net.cpp:165] Memory required for data: 2233835640
I0222 19:22:13.453128 10448 layer_factory.hpp:77] Creating layer relu2_2
I0222 19:22:13.453128 10448 net.cpp:100] Creating Layer relu2_2
I0222 19:22:13.453128 10448 net.cpp:444] relu2_2 <- conv2_2
I0222 19:22:13.453128 10448 net.cpp:405] relu2_2 -> conv2_2 (in-place)
I0222 19:22:13.453128 10448 net.cpp:150] Setting up relu2_2
I0222 19:22:13.453128 10448 net.cpp:157] Top shape: 30 128 112 112 (48168960)
I0222 19:22:13.453128 10448 net.cpp:165] Memory required for data: 2426511480
I0222 19:22:13.453128 10448 layer_factory.hpp:77] Creating layer pool2
I0222 19:22:13.453128 10448 net.cpp:100] Creating Layer pool2
I0222 19:22:13.453128 10448 net.cpp:444] pool2 <- conv2_2
I0222 19:22:13.453128 10448 net.cpp:418] pool2 -> pool2
I0222 19:22:13.453128 10448 net.cpp:150] Setting up pool2
I0222 19:22:13.453128 10448 net.cpp:157] Top shape: 30 128 56 56 (12042240)
I0222 19:22:13.453128 10448 net.cpp:165] Memory required for data: 2474680440
I0222 19:22:13.453128 10448 layer_factory.hpp:77] Creating layer conv3_1
I0222 19:22:13.453128 10448 net.cpp:100] Creating Layer conv3_1
I0222 19:22:13.453128 10448 net.cpp:444] conv3_1 <- pool2
I0222 19:22:13.453128 10448 net.cpp:418] conv3_1 -> conv3_1
I0222 19:22:13.459130 10448 net.cpp:150] Setting up conv3_1
I0222 19:22:13.459130 10448 net.cpp:157] Top shape: 30 256 56 56 (24084480)
I0222 19:22:13.459130 10448 net.cpp:165] Memory required for data: 2571018360
I0222 19:22:13.459130 10448 layer_factory.hpp:77] Creating layer relu3_1
I0222 19:22:13.459130 10448 net.cpp:100] Creating Layer relu3_1
I0222 19:22:13.459130 10448 net.cpp:444] relu3_1 <- conv3_1
I0222 19:22:13.459130 10448 net.cpp:405] relu3_1 -> conv3_1 (in-place)
I0222 19:22:13.460129 10448 net.cpp:150] Setting up relu3_1
I0222 19:22:13.460129 10448 net.cpp:157] Top shape: 30 256 56 56 (24084480)
I0222 19:22:13.460129 10448 net.cpp:165] Memory required for data: 2667356280
I0222 19:22:13.460129 10448 layer_factory.hpp:77] Creating layer conv3_2
I0222 19:22:13.460129 10448 net.cpp:100] Creating Layer conv3_2
I0222 19:22:13.460129 10448 net.cpp:444] conv3_2 <- conv3_1
I0222 19:22:13.460129 10448 net.cpp:418] conv3_2 -> conv3_2
I0222 19:22:13.472129 10448 net.cpp:150] Setting up conv3_2
I0222 19:22:13.472129 10448 net.cpp:157] Top shape: 30 256 56 56 (24084480)
I0222 19:22:13.472129 10448 net.cpp:165] Memory required for data: 2763694200
I0222 19:22:13.472129 10448 layer_factory.hpp:77] Creating layer relu3_2
I0222 19:22:13.472129 10448 net.cpp:100] Creating Layer relu3_2
I0222 19:22:13.472129 10448 net.cpp:444] relu3_2 <- conv3_2
I0222 19:22:13.472129 10448 net.cpp:405] relu3_2 -> conv3_2 (in-place)
I0222 19:22:13.472129 10448 net.cpp:150] Setting up relu3_2
I0222 19:22:13.472129 10448 net.cpp:157] Top shape: 30 256 56 56 (24084480)
I0222 19:22:13.472129 10448 net.cpp:165] Memory required for data: 2860032120
I0222 19:22:13.472129 10448 layer_factory.hpp:77] Creating layer conv3_3
I0222 19:22:13.472129 10448 net.cpp:100] Creating Layer conv3_3
I0222 19:22:13.472129 10448 net.cpp:444] conv3_3 <- conv3_2
I0222 19:22:13.472129 10448 net.cpp:418] conv3_3 -> conv3_3
I0222 19:22:13.483130 10448 net.cpp:150] Setting up conv3_3
I0222 19:22:13.483130 10448 net.cpp:157] Top shape: 30 256 56 56 (24084480)
I0222 19:22:13.483130 10448 net.cpp:165] Memory required for data: 2956370040
I0222 19:22:13.483130 10448 layer_factory.hpp:77] Creating layer relu3_3
I0222 19:22:13.483130 10448 net.cpp:100] Creating Layer relu3_3
I0222 19:22:13.483130 10448 net.cpp:444] relu3_3 <- conv3_3
I0222 19:22:13.483130 10448 net.cpp:405] relu3_3 -> conv3_3 (in-place)
I0222 19:22:13.483130 10448 net.cpp:150] Setting up relu3_3
I0222 19:22:13.483130 10448 net.cpp:157] Top shape: 30 256 56 56 (24084480)
I0222 19:22:13.483130 10448 net.cpp:165] Memory required for data: 3052707960
I0222 19:22:13.483130 10448 layer_factory.hpp:77] Creating layer pool3
I0222 19:22:13.483130 10448 net.cpp:100] Creating Layer pool3
I0222 19:22:13.483130 10448 net.cpp:444] pool3 <- conv3_3
I0222 19:22:13.483130 10448 net.cpp:418] pool3 -> pool3
I0222 19:22:13.483130 10448 net.cpp:150] Setting up pool3
I0222 19:22:13.483130 10448 net.cpp:157] Top shape: 30 256 28 28 (6021120)
I0222 19:22:13.483130 10448 net.cpp:165] Memory required for data: 3076792440
I0222 19:22:13.483130 10448 layer_factory.hpp:77] Creating layer conv4_1
I0222 19:22:13.483130 10448 net.cpp:100] Creating Layer conv4_1
I0222 19:22:13.483130 10448 net.cpp:444] conv4_1 <- pool3
I0222 19:22:13.483130 10448 net.cpp:418] conv4_1 -> conv4_1
I0222 19:22:13.504132 10448 net.cpp:150] Setting up conv4_1
I0222 19:22:13.505131 10448 net.cpp:157] Top shape: 30 512 28 28 (12042240)
I0222 19:22:13.505131 10448 net.cpp:165] Memory required for data: 3124961400
I0222 19:22:13.505131 10448 layer_factory.hpp:77] Creating layer relu4_1
I0222 19:22:13.505131 10448 net.cpp:100] Creating Layer relu4_1
I0222 19:22:13.505131 10448 net.cpp:444] relu4_1 <- conv4_1
I0222 19:22:13.505131 10448 net.cpp:405] relu4_1 -> conv4_1 (in-place)
I0222 19:22:13.505131 10448 net.cpp:150] Setting up relu4_1
I0222 19:22:13.505131 10448 net.cpp:157] Top shape: 30 512 28 28 (12042240)
I0222 19:22:13.505131 10448 net.cpp:165] Memory required for data: 3173130360
I0222 19:22:13.505131 10448 layer_factory.hpp:77] Creating layer conv4_2
I0222 19:22:13.505131 10448 net.cpp:100] Creating Layer conv4_2
I0222 19:22:13.505131 10448 net.cpp:444] conv4_2 <- conv4_1
I0222 19:22:13.505131 10448 net.cpp:418] conv4_2 -> conv4_2
I0222 19:22:13.542134 10448 net.cpp:150] Setting up conv4_2
I0222 19:22:13.542134 10448 net.cpp:157] Top shape: 30 512 28 28 (12042240)
I0222 19:22:13.542134 10448 net.cpp:165] Memory required for data: 3221299320
I0222 19:22:13.542134 10448 layer_factory.hpp:77] Creating layer relu4_2
I0222 19:22:13.542134 10448 net.cpp:100] Creating Layer relu4_2
I0222 19:22:13.542134 10448 net.cpp:444] relu4_2 <- conv4_2
I0222 19:22:13.542134 10448 net.cpp:405] relu4_2 -> conv4_2 (in-place)
I0222 19:22:13.543133 10448 net.cpp:150] Setting up relu4_2
I0222 19:22:13.543133 10448 net.cpp:157] Top shape: 30 512 28 28 (12042240)
I0222 19:22:13.543133 10448 net.cpp:165] Memory required for data: 3269468280
I0222 19:22:13.543133 10448 layer_factory.hpp:77] Creating layer conv4_3
I0222 19:22:13.543133 10448 net.cpp:100] Creating Layer conv4_3
I0222 19:22:13.543133 10448 net.cpp:444] conv4_3 <- conv4_2
I0222 19:22:13.543133 10448 net.cpp:418] conv4_3 -> conv4_3
I0222 19:22:13.581136 10448 net.cpp:150] Setting up conv4_3
I0222 19:22:13.581136 10448 net.cpp:157] Top shape: 30 512 28 28 (12042240)
I0222 19:22:13.581136 10448 net.cpp:165] Memory required for data: 3317637240
I0222 19:22:13.581136 10448 layer_factory.hpp:77] Creating layer relu4_3
I0222 19:22:13.581136 10448 net.cpp:100] Creating Layer relu4_3
I0222 19:22:13.582136 10448 net.cpp:444] relu4_3 <- conv4_3
I0222 19:22:13.582136 10448 net.cpp:405] relu4_3 -> conv4_3 (in-place)
I0222 19:22:13.582136 10448 net.cpp:150] Setting up relu4_3
I0222 19:22:13.582136 10448 net.cpp:157] Top shape: 30 512 28 28 (12042240)
I0222 19:22:13.582136 10448 net.cpp:165] Memory required for data: 3365806200
I0222 19:22:13.582136 10448 layer_factory.hpp:77] Creating layer pool4
I0222 19:22:13.582136 10448 net.cpp:100] Creating Layer pool4
I0222 19:22:13.582136 10448 net.cpp:444] pool4 <- conv4_3
I0222 19:22:13.582136 10448 net.cpp:418] pool4 -> pool4
I0222 19:22:13.582136 10448 net.cpp:150] Setting up pool4
I0222 19:22:13.582136 10448 net.cpp:157] Top shape: 30 512 14 14 (3010560)
I0222 19:22:13.582136 10448 net.cpp:165] Memory required for data: 3377848440
I0222 19:22:13.582136 10448 layer_factory.hpp:77] Creating layer conv5_1
I0222 19:22:13.582136 10448 net.cpp:100] Creating Layer conv5_1
I0222 19:22:13.582136 10448 net.cpp:444] conv5_1 <- pool4
I0222 19:22:13.582136 10448 net.cpp:418] conv5_1 -> conv5_1
I0222 19:22:13.620138 10448 net.cpp:150] Setting up conv5_1
I0222 19:22:13.620138 10448 net.cpp:157] Top shape: 30 512 14 14 (3010560)
I0222 19:22:13.620138 10448 net.cpp:165] Memory required for data: 3389890680
I0222 19:22:13.620138 10448 layer_factory.hpp:77] Creating layer relu5_1
I0222 19:22:13.620138 10448 net.cpp:100] Creating Layer relu5_1
I0222 19:22:13.620138 10448 net.cpp:444] relu5_1 <- conv5_1
I0222 19:22:13.620138 10448 net.cpp:405] relu5_1 -> conv5_1 (in-place)
I0222 19:22:13.620138 10448 net.cpp:150] Setting up relu5_1
I0222 19:22:13.620138 10448 net.cpp:157] Top shape: 30 512 14 14 (3010560)
I0222 19:22:13.621139 10448 net.cpp:165] Memory required for data: 3401932920
I0222 19:22:13.621139 10448 layer_factory.hpp:77] Creating layer conv5_2
I0222 19:22:13.621139 10448 net.cpp:100] Creating Layer conv5_2
I0222 19:22:13.621139 10448 net.cpp:444] conv5_2 <- conv5_1
I0222 19:22:13.621139 10448 net.cpp:418] conv5_2 -> conv5_2
I0222 19:22:13.660140 10448 net.cpp:150] Setting up conv5_2
I0222 19:22:13.660140 10448 net.cpp:157] Top shape: 30 512 14 14 (3010560)
I0222 19:22:13.660140 10448 net.cpp:165] Memory required for data: 3413975160
I0222 19:22:13.660140 10448 layer_factory.hpp:77] Creating layer relu5_2
I0222 19:22:13.660140 10448 net.cpp:100] Creating Layer relu5_2
I0222 19:22:13.660140 10448 net.cpp:444] relu5_2 <- conv5_2
I0222 19:22:13.660140 10448 net.cpp:405] relu5_2 -> conv5_2 (in-place)
I0222 19:22:13.661140 10448 net.cpp:150] Setting up relu5_2
I0222 19:22:13.661140 10448 net.cpp:157] Top shape: 30 512 14 14 (3010560)
I0222 19:22:13.661140 10448 net.cpp:165] Memory required for data: 3426017400
I0222 19:22:13.661140 10448 layer_factory.hpp:77] Creating layer conv5_3
I0222 19:22:13.661140 10448 net.cpp:100] Creating Layer conv5_3
I0222 19:22:13.661140 10448 net.cpp:444] conv5_3 <- conv5_2
I0222 19:22:13.661140 10448 net.cpp:418] conv5_3 -> conv5_3
I0222 19:22:13.704144 10448 net.cpp:150] Setting up conv5_3
I0222 19:22:13.704144 10448 net.cpp:157] Top shape: 30 512 14 14 (3010560)
I0222 19:22:13.704144 10448 net.cpp:165] Memory required for data: 3438059640
I0222 19:22:13.704144 10448 layer_factory.hpp:77] Creating layer relu5_3
I0222 19:22:13.704144 10448 net.cpp:100] Creating Layer relu5_3
I0222 19:22:13.704144 10448 net.cpp:444] relu5_3 <- conv5_3
I0222 19:22:13.704144 10448 net.cpp:405] relu5_3 -> conv5_3 (in-place)
I0222 19:22:13.704144 10448 net.cpp:150] Setting up relu5_3
I0222 19:22:13.704144 10448 net.cpp:157] Top shape: 30 512 14 14 (3010560)
I0222 19:22:13.704144 10448 net.cpp:165] Memory required for data: 3450101880
I0222 19:22:13.704144 10448 layer_factory.hpp:77] Creating layer pool5
I0222 19:22:13.704144 10448 net.cpp:100] Creating Layer pool5
I0222 19:22:13.704144 10448 net.cpp:444] pool5 <- conv5_3
I0222 19:22:13.704144 10448 net.cpp:418] pool5 -> pool5
I0222 19:22:13.704144 10448 net.cpp:150] Setting up pool5
I0222 19:22:13.704144 10448 net.cpp:157] Top shape: 30 512 7 7 (752640)
I0222 19:22:13.704144 10448 net.cpp:165] Memory required for data: 3453112440
I0222 19:22:13.704144 10448 layer_factory.hpp:77] Creating layer fc6
I0222 19:22:13.704144 10448 net.cpp:100] Creating Layer fc6
I0222 19:22:13.704144 10448 net.cpp:444] fc6 <- pool5
I0222 19:22:13.704144 10448 net.cpp:418] fc6 -> fc6
I0222 19:22:15.477244 10448 net.cpp:150] Setting up fc6
I0222 19:22:15.477244 10448 net.cpp:157] Top shape: 30 4096 (122880)
I0222 19:22:15.477244 10448 net.cpp:165] Memory required for data: 3453603960
I0222 19:22:15.477244 10448 layer_factory.hpp:77] Creating layer relu6
I0222 19:22:15.477244 10448 net.cpp:100] Creating Layer relu6
I0222 19:22:15.477244 10448 net.cpp:444] relu6 <- fc6
I0222 19:22:15.477244 10448 net.cpp:405] relu6 -> fc6 (in-place)
I0222 19:22:15.478245 10448 net.cpp:150] Setting up relu6
I0222 19:22:15.478245 10448 net.cpp:157] Top shape: 30 4096 (122880)
I0222 19:22:15.478245 10448 net.cpp:165] Memory required for data: 3454095480
I0222 19:22:15.478245 10448 layer_factory.hpp:77] Creating layer drop6
I0222 19:22:15.478245 10448 net.cpp:100] Creating Layer drop6
I0222 19:22:15.478245 10448 net.cpp:444] drop6 <- fc6
I0222 19:22:15.478245 10448 net.cpp:405] drop6 -> fc6 (in-place)
I0222 19:22:15.478245 10448 net.cpp:150] Setting up drop6
I0222 19:22:15.478245 10448 net.cpp:157] Top shape: 30 4096 (122880)
I0222 19:22:15.478245 10448 net.cpp:165] Memory required for data: 3454587000
I0222 19:22:15.478245 10448 layer_factory.hpp:77] Creating layer fc7
I0222 19:22:15.478245 10448 net.cpp:100] Creating Layer fc7
I0222 19:22:15.478245 10448 net.cpp:444] fc7 <- fc6
I0222 19:22:15.478245 10448 net.cpp:418] fc7 -> fc7
I0222 19:22:15.749260 10448 net.cpp:150] Setting up fc7
I0222 19:22:15.749260 10448 net.cpp:157] Top shape: 30 4096 (122880)
I0222 19:22:15.749260 10448 net.cpp:165] Memory required for data: 3455078520
I0222 19:22:15.749260 10448 layer_factory.hpp:77] Creating layer relu7
I0222 19:22:15.749260 10448 net.cpp:100] Creating Layer relu7
I0222 19:22:15.749260 10448 net.cpp:444] relu7 <- fc7
I0222 19:22:15.749260 10448 net.cpp:405] relu7 -> fc7 (in-place)
I0222 19:22:15.749260 10448 net.cpp:150] Setting up relu7
I0222 19:22:15.749260 10448 net.cpp:157] Top shape: 30 4096 (122880)
I0222 19:22:15.749260 10448 net.cpp:165] Memory required for data: 3455570040
I0222 19:22:15.749260 10448 layer_factory.hpp:77] Creating layer drop7
I0222 19:22:15.749260 10448 net.cpp:100] Creating Layer drop7
I0222 19:22:15.749260 10448 net.cpp:444] drop7 <- fc7
I0222 19:22:15.749260 10448 net.cpp:405] drop7 -> fc7 (in-place)
I0222 19:22:15.750260 10448 net.cpp:150] Setting up drop7
I0222 19:22:15.750260 10448 net.cpp:157] Top shape: 30 4096 (122880)
I0222 19:22:15.750260 10448 net.cpp:165] Memory required for data: 3456061560
I0222 19:22:15.750260 10448 layer_factory.hpp:77] Creating layer fc8_my
I0222 19:22:15.750260 10448 net.cpp:100] Creating Layer fc8_my
I0222 19:22:15.750260 10448 net.cpp:444] fc8_my <- fc7
I0222 19:22:15.750260 10448 net.cpp:418] fc8_my -> fc8_my
I0222 19:22:15.750260 10448 net.cpp:150] Setting up fc8_my
I0222 19:22:15.750260 10448 net.cpp:157] Top shape: 30 2 (60)
I0222 19:22:15.750260 10448 net.cpp:165] Memory required for data: 3456061800
I0222 19:22:15.750260 10448 layer_factory.hpp:77] Creating layer loss
I0222 19:22:15.750260 10448 net.cpp:100] Creating Layer loss
I0222 19:22:15.750260 10448 net.cpp:444] loss <- fc8_my
I0222 19:22:15.750260 10448 net.cpp:444] loss <- label
I0222 19:22:15.750260 10448 net.cpp:418] loss -> loss
I0222 19:22:15.750260 10448 layer_factory.hpp:77] Creating layer loss
I0222 19:22:15.751260 10448 net.cpp:150] Setting up loss
I0222 19:22:15.751260 10448 net.cpp:157] Top shape: (1)
I0222 19:22:15.751260 10448 net.cpp:160]     with loss weight 1
I0222 19:22:15.752260 10448 net.cpp:165] Memory required for data: 3456061804
I0222 19:22:15.752260 10448 net.cpp:226] loss needs backward computation.
I0222 19:22:15.752260 10448 net.cpp:226] fc8_my needs backward computation.
I0222 19:22:15.752260 10448 net.cpp:226] drop7 needs backward computation.
I0222 19:22:15.752260 10448 net.cpp:226] relu7 needs backward computation.
I0222 19:22:15.752260 10448 net.cpp:226] fc7 needs backward computation.
I0222 19:22:15.752260 10448 net.cpp:226] drop6 needs backward computation.
I0222 19:22:15.752260 10448 net.cpp:226] relu6 needs backward computation.
I0222 19:22:15.752260 10448 net.cpp:226] fc6 needs backward computation.
I0222 19:22:15.752260 10448 net.cpp:226] pool5 needs backward computation.
I0222 19:22:15.753260 10448 net.cpp:226] relu5_3 needs backward computation.
I0222 19:22:15.753260 10448 net.cpp:226] conv5_3 needs backward computation.
I0222 19:22:15.753260 10448 net.cpp:226] relu5_2 needs backward computation.
I0222 19:22:15.753260 10448 net.cpp:226] conv5_2 needs backward computation.
I0222 19:22:15.753260 10448 net.cpp:226] relu5_1 needs backward computation.
I0222 19:22:15.753260 10448 net.cpp:226] conv5_1 needs backward computation.
I0222 19:22:15.753260 10448 net.cpp:226] pool4 needs backward computation.
I0222 19:22:15.753260 10448 net.cpp:226] relu4_3 needs backward computation.
I0222 19:22:15.753260 10448 net.cpp:226] conv4_3 needs backward computation.
I0222 19:22:15.753260 10448 net.cpp:226] relu4_2 needs backward computation.
I0222 19:22:15.753260 10448 net.cpp:226] conv4_2 needs backward computation.
I0222 19:22:15.753260 10448 net.cpp:226] relu4_1 needs backward computation.
I0222 19:22:15.753260 10448 net.cpp:226] conv4_1 needs backward computation.
I0222 19:22:15.753260 10448 net.cpp:226] pool3 needs backward computation.
I0222 19:22:15.753260 10448 net.cpp:226] relu3_3 needs backward computation.
I0222 19:22:15.753260 10448 net.cpp:226] conv3_3 needs backward computation.
I0222 19:22:15.753260 10448 net.cpp:226] relu3_2 needs backward computation.
I0222 19:22:15.753260 10448 net.cpp:226] conv3_2 needs backward computation.
I0222 19:22:15.753260 10448 net.cpp:226] relu3_1 needs backward computation.
I0222 19:22:15.753260 10448 net.cpp:226] conv3_1 needs backward computation.
I0222 19:22:15.753260 10448 net.cpp:226] pool2 needs backward computation.
I0222 19:22:15.753260 10448 net.cpp:226] relu2_2 needs backward computation.
I0222 19:22:15.753260 10448 net.cpp:226] conv2_2 needs backward computation.
I0222 19:22:15.753260 10448 net.cpp:226] relu2_1 needs backward computation.
I0222 19:22:15.753260 10448 net.cpp:226] conv2_1 needs backward computation.
I0222 19:22:15.753260 10448 net.cpp:226] pool1 needs backward computation.
I0222 19:22:15.753260 10448 net.cpp:226] relu1_2 needs backward computation.
I0222 19:22:15.753260 10448 net.cpp:226] conv1_2 needs backward computation.
I0222 19:22:15.753260 10448 net.cpp:226] relu1_1 needs backward computation.
I0222 19:22:15.753260 10448 net.cpp:226] conv1_1 needs backward computation.
I0222 19:22:15.753260 10448 net.cpp:228] data does not need backward computation.
I0222 19:22:15.753260 10448 net.cpp:270] This network produces output loss
I0222 19:22:15.753260 10448 net.cpp:283] Network initialization done.
I0222 19:22:15.755260 10448 solver.cpp:181] Creating test net (#0) specified by net file: E:/CAFFE/caffe-master/project/vgg16/train_val _1.prototxt
I0222 19:22:15.755260 10448 net.cpp:332] The NetState phase (1) differed from the phase (0) specified by a rule in layer data
I0222 19:22:15.756260 10448 net.cpp:58] Initializing net from parameters: 
name: "VGG16"
state {
  phase: TEST
}
layer {
  name: "data"
  type: "Data"
  top: "data"
  top: "label"
  include {
    phase: TEST
  }
  transform_param {
    mirror: false
    crop_size: 224
    mean_value: 103.939
    mean_value: 116.779
    mean_value: 123.68
  }
  data_param {
    source: "E:/CAFFE/caffe-master/project/vgg16/valdb"
    batch_size: 30
    backend: LMDB
  }
}
layer {
  name: "conv1_1"
  type: "Convolution"
  bottom: "data"
  top: "conv1_1"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 64
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu1_1"
  type: "ReLU"
  bottom: "conv1_1"
  top: "conv1_1"
}
layer {
  name: "conv1_2"
  type: "Convolution"
  bottom: "conv1_1"
  top: "conv1_2"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 64
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu1_2"
  type: "ReLU"
  bottom: "conv1_2"
  top: "conv1_2"
}
layer {
  name: "pool1"
  type: "Pooling"
  bottom: "conv1_2"
  top: "pool1"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
layer {
  name: "conv2_1"
  type: "Convolution"
  bottom: "pool1"
  top: "conv2_1"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 128
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu2_1"
  type: "ReLU"
  bottom: "conv2_1"
  top: "conv2_1"
}
layer {
  name: "conv2_2"
  type: "Convolution"
  bottom: "conv2_1"
  top: "conv2_2"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 128
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu2_2"
  type: "ReLU"
  bottom: "conv2_2"
  top: "conv2_2"
}
layer {
  name: "pool2"
  type: "Pooling"
  bottom: "conv2_2"
  top: "pool2"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
layer {
  name: "conv3_1"
  type: "Convolution"
  bottom: "pool2"
  top: "conv3_1"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu3_1"
  type: "ReLU"
  bottom: "conv3_1"
  top: "conv3_1"
}
layer {
  name: "conv3_2"
  type: "Convolution"
  bottom: "conv3_1"
  top: "conv3_2"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu3_2"
  type: "ReLU"
  bottom: "conv3_2"
  top: "conv3_2"
}
layer {
  name: "conv3_3"
  type: "Convolution"
  bottom: "conv3_2"
  top: "conv3_3"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu3_3"
  type: "ReLU"
  bottom: "conv3_3"
  top: "conv3_3"
}
layer {
  name: "pool3"
  type: "Pooling"
  bottom: "conv3_3"
  top: "pool3"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
layer {
  name: "conv4_1"
  type: "Convolution"
  bottom: "pool3"
  top: "conv4_1"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu4_1"
  type: "ReLU"
  bottom: "conv4_1"
  top: "conv4_1"
}
layer {
  name: "conv4_2"
  type: "Convolution"
  bottom: "conv4_1"
  top: "conv4_2"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu4_2"
  type: "ReLU"
  bottom: "conv4_2"
  top: "conv4_2"
}
layer {
  name: "conv4_3"
  type: "Convolution"
  bottom: "conv4_2"
  top: "conv4_3"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu4_3"
  type: "ReLU"
  bottom: "conv4_3"
  top: "conv4_3"
}
layer {
  name: "pool4"
  type: "Pooling"
  bottom: "conv4_3"
  top: "pool4"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
layer {
  name: "conv5_1"
  type: "Convolution"
  bottom: "pool4"
  top: "conv5_1"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu5_1"
  type: "ReLU"
  bottom: "conv5_1"
  top: "conv5_1"
}
layer {
  name: "conv5_2"
  type: "Convolution"
  bottom: "conv5_1"
  top: "conv5_2"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu5_2"
  type: "ReLU"
  bottom: "conv5_2"
  top: "conv5_2"
}
layer {
  name: "conv5_3"
  type: "Convolution"
  bottom: "conv5_2"
  top: "conv5_3"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu5_3"
  type: "ReLU"
  bottom: "conv5_3"
  top: "conv5_3"
}
layer {
  name: "pool5"
  type: "Pooling"
  bottom: "conv5_3"
  top: "pool5"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
layer {
  name: "fc6"
  type: "InnerProduct"
  bottom: "pool5"
  top: "fc6"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  inner_product_param {
    num_output: 4096
    weight_filler {
      type: "gaussian"
      std: 0.005
    }
    bias_filler {
      type: "constant"
      value: 0.1
    }
  }
}
layer {
  name: "relu6"
  type: "ReLU"
  bottom: "fc6"
  top: "fc6"
}
layer {
  name: "drop6"
  type: "Dropout"
  bottom: "fc6"
  top: "fc6"
  dropout_param {
    dropout_ratio: 0.5
  }
}
layer {
  name: "fc7"
  type: "InnerProduct"
  bottom: "fc6"
  top: "fc7"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  inner_product_param {
    num_output: 4096
    weight_filler {
      type: "gaussian"
      std: 0.005
    }
    bias_filler {
      type: "constant"
      value: 0.1
    }
  }
}
layer {
  name: "relu7"
  type: "ReLU"
  bottom: "fc7"
  top: "fc7"
}
layer {
  name: "drop7"
  type: "Dropout"
  bottom: "fc7"
  top: "fc7"
  dropout_param {
    dropout_ratio: 0.5
  }
}
layer {
  name: "fc8_my"
  type: "InnerProduct"
  bottom: "fc7"
  top: "fc8_my"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  inner_product_param {
    num_output: 2
    weight_filler {
      type: "gaussian"
      std: 0.005
    }
    bias_filler {
      type: "constant"
      value: 0.1
    }
  }
}
layer {
  name: "accuracy_at_1"
  type: "Accuracy"
  bottom: "fc8_my"
  bottom: "label"
  top: "accuracy_at_1"
  include {
    phase: TEST
  }
  accuracy_param {
    top_k: 1
  }
}
layer {
  name: "loss"
  type: "SoftmaxWithLoss"
  bottom: "fc8_my"
  bottom: "label"
  top: "loss"
}
I0222 19:22:15.756260 10448 layer_factory.hpp:77] Creating layer data
I0222 19:22:15.756260 10448 net.cpp:100] Creating Layer data
I0222 19:22:15.756260 10448 net.cpp:418] data -> data
I0222 19:22:15.756260 10448 net.cpp:418] data -> label
I0222 19:22:15.758260 14916 common.cpp:36] System entropy source not available, using fallback algorithm to generate seed instead.
I0222 19:22:15.795262 14916 db_lmdb.cpp:40] Opened lmdb E:/CAFFE/caffe-master/project/vgg16/valdb
I0222 19:22:15.808264 10448 data_layer.cpp:41] output data size: 30,3,224,224
I0222 19:22:15.867266 10448 net.cpp:150] Setting up data
I0222 19:22:15.867266 10448 net.cpp:157] Top shape: 30 3 224 224 (4515840)
I0222 19:22:15.867266 10448 net.cpp:157] Top shape: 30 (30)
I0222 19:22:15.867266 10448 net.cpp:165] Memory required for data: 18063480
I0222 19:22:15.867266 10448 layer_factory.hpp:77] Creating layer label_data_1_split
I0222 19:22:15.867266 10448 net.cpp:100] Creating Layer label_data_1_split
I0222 19:22:15.867266 10448 net.cpp:444] label_data_1_split <- label
I0222 19:22:15.867266 10448 net.cpp:418] label_data_1_split -> label_data_1_split_0
I0222 19:22:15.867266 10448 net.cpp:418] label_data_1_split -> label_data_1_split_1
I0222 19:22:15.867266 10448 net.cpp:150] Setting up label_data_1_split
I0222 19:22:15.867266 10448 net.cpp:157] Top shape: 30 (30)
I0222 19:22:15.867266 10448 net.cpp:157] Top shape: 30 (30)
I0222 19:22:15.867266 10448 net.cpp:165] Memory required for data: 18063720
I0222 19:22:15.867266 10448 layer_factory.hpp:77] Creating layer conv1_1
I0222 19:22:15.867266 10448 net.cpp:100] Creating Layer conv1_1
I0222 19:22:15.867266 10448 net.cpp:444] conv1_1 <- data
I0222 19:22:15.867266 10448 net.cpp:418] conv1_1 -> conv1_1
I0222 19:22:15.870266 14448 common.cpp:36] System entropy source not available, using fallback algorithm to generate seed instead.
I0222 19:22:15.876267 10448 net.cpp:150] Setting up conv1_1
I0222 19:22:15.876267 10448 net.cpp:157] Top shape: 30 64 224 224 (96337920)
I0222 19:22:15.876267 10448 net.cpp:165] Memory required for data: 403415400
I0222 19:22:15.876267 10448 layer_factory.hpp:77] Creating layer relu1_1
I0222 19:22:15.876267 10448 net.cpp:100] Creating Layer relu1_1
I0222 19:22:15.876267 10448 net.cpp:444] relu1_1 <- conv1_1
I0222 19:22:15.876267 10448 net.cpp:405] relu1_1 -> conv1_1 (in-place)
I0222 19:22:15.877267 10448 net.cpp:150] Setting up relu1_1
I0222 19:22:15.877267 10448 net.cpp:157] Top shape: 30 64 224 224 (96337920)
I0222 19:22:15.877267 10448 net.cpp:165] Memory required for data: 788767080
I0222 19:22:15.877267 10448 layer_factory.hpp:77] Creating layer conv1_2
I0222 19:22:15.877267 10448 net.cpp:100] Creating Layer conv1_2
I0222 19:22:15.877267 10448 net.cpp:444] conv1_2 <- conv1_1
I0222 19:22:15.877267 10448 net.cpp:418] conv1_2 -> conv1_2
I0222 19:22:15.880267 10448 net.cpp:150] Setting up conv1_2
I0222 19:22:15.881268 10448 net.cpp:157] Top shape: 30 64 224 224 (96337920)
I0222 19:22:15.881268 10448 net.cpp:165] Memory required for data: 1174118760
I0222 19:22:15.881268 10448 layer_factory.hpp:77] Creating layer relu1_2
I0222 19:22:15.881268 10448 net.cpp:100] Creating Layer relu1_2
I0222 19:22:15.881268 10448 net.cpp:444] relu1_2 <- conv1_2
I0222 19:22:15.881268 10448 net.cpp:405] relu1_2 -> conv1_2 (in-place)
I0222 19:22:15.882267 10448 net.cpp:150] Setting up relu1_2
I0222 19:22:15.882267 10448 net.cpp:157] Top shape: 30 64 224 224 (96337920)
I0222 19:22:15.882267 10448 net.cpp:165] Memory required for data: 1559470440
I0222 19:22:15.882267 10448 layer_factory.hpp:77] Creating layer pool1
I0222 19:22:15.882267 10448 net.cpp:100] Creating Layer pool1
I0222 19:22:15.882267 10448 net.cpp:444] pool1 <- conv1_2
I0222 19:22:15.882267 10448 net.cpp:418] pool1 -> pool1
I0222 19:22:15.882267 10448 net.cpp:150] Setting up pool1
I0222 19:22:15.882267 10448 net.cpp:157] Top shape: 30 64 112 112 (24084480)
I0222 19:22:15.882267 10448 net.cpp:165] Memory required for data: 1655808360
I0222 19:22:15.882267 10448 layer_factory.hpp:77] Creating layer conv2_1
I0222 19:22:15.882267 10448 net.cpp:100] Creating Layer conv2_1
I0222 19:22:15.882267 10448 net.cpp:444] conv2_1 <- pool1
I0222 19:22:15.882267 10448 net.cpp:418] conv2_1 -> conv2_1
I0222 19:22:15.887267 10448 net.cpp:150] Setting up conv2_1
I0222 19:22:15.887267 10448 net.cpp:157] Top shape: 30 128 112 112 (48168960)
I0222 19:22:15.887267 10448 net.cpp:165] Memory required for data: 1848484200
I0222 19:22:15.887267 10448 layer_factory.hpp:77] Creating layer relu2_1
I0222 19:22:15.887267 10448 net.cpp:100] Creating Layer relu2_1
I0222 19:22:15.887267 10448 net.cpp:444] relu2_1 <- conv2_1
I0222 19:22:15.887267 10448 net.cpp:405] relu2_1 -> conv2_1 (in-place)
I0222 19:22:15.888267 10448 net.cpp:150] Setting up relu2_1
I0222 19:22:15.888267 10448 net.cpp:157] Top shape: 30 128 112 112 (48168960)
I0222 19:22:15.888267 10448 net.cpp:165] Memory required for data: 2041160040
I0222 19:22:15.888267 10448 layer_factory.hpp:77] Creating layer conv2_2
I0222 19:22:15.888267 10448 net.cpp:100] Creating Layer conv2_2
I0222 19:22:15.888267 10448 net.cpp:444] conv2_2 <- conv2_1
I0222 19:22:15.888267 10448 net.cpp:418] conv2_2 -> conv2_2
I0222 19:22:15.895268 10448 net.cpp:150] Setting up conv2_2
I0222 19:22:15.895268 10448 net.cpp:157] Top shape: 30 128 112 112 (48168960)
I0222 19:22:15.896268 10448 net.cpp:165] Memory required for data: 2233835880
I0222 19:22:15.896268 10448 layer_factory.hpp:77] Creating layer relu2_2
I0222 19:22:15.896268 10448 net.cpp:100] Creating Layer relu2_2
I0222 19:22:15.896268 10448 net.cpp:444] relu2_2 <- conv2_2
I0222 19:22:15.896268 10448 net.cpp:405] relu2_2 -> conv2_2 (in-place)
I0222 19:22:15.898268 10448 net.cpp:150] Setting up relu2_2
I0222 19:22:15.898268 10448 net.cpp:157] Top shape: 30 128 112 112 (48168960)
I0222 19:22:15.898268 10448 net.cpp:165] Memory required for data: 2426511720
I0222 19:22:15.899268 10448 layer_factory.hpp:77] Creating layer pool2
I0222 19:22:15.899268 10448 net.cpp:100] Creating Layer pool2
I0222 19:22:15.899268 10448 net.cpp:444] pool2 <- conv2_2
I0222 19:22:15.899268 10448 net.cpp:418] pool2 -> pool2
I0222 19:22:15.899268 10448 net.cpp:150] Setting up pool2
I0222 19:22:15.899268 10448 net.cpp:157] Top shape: 30 128 56 56 (12042240)
I0222 19:22:15.899268 10448 net.cpp:165] Memory required for data: 2474680680
I0222 19:22:15.899268 10448 layer_factory.hpp:77] Creating layer conv3_1
I0222 19:22:15.899268 10448 net.cpp:100] Creating Layer conv3_1
I0222 19:22:15.899268 10448 net.cpp:444] conv3_1 <- pool2
I0222 19:22:15.899268 10448 net.cpp:418] conv3_1 -> conv3_1
I0222 19:22:15.906270 10448 net.cpp:150] Setting up conv3_1
I0222 19:22:15.906270 10448 net.cpp:157] Top shape: 30 256 56 56 (24084480)
I0222 19:22:15.906270 10448 net.cpp:165] Memory required for data: 2571018600
I0222 19:22:15.906270 10448 layer_factory.hpp:77] Creating layer relu3_1
I0222 19:22:15.906270 10448 net.cpp:100] Creating Layer relu3_1
I0222 19:22:15.906270 10448 net.cpp:444] relu3_1 <- conv3_1
I0222 19:22:15.906270 10448 net.cpp:405] relu3_1 -> conv3_1 (in-place)
I0222 19:22:15.906270 10448 net.cpp:150] Setting up relu3_1
I0222 19:22:15.906270 10448 net.cpp:157] Top shape: 30 256 56 56 (24084480)
I0222 19:22:15.906270 10448 net.cpp:165] Memory required for data: 2667356520
I0222 19:22:15.906270 10448 layer_factory.hpp:77] Creating layer conv3_2
I0222 19:22:15.906270 10448 net.cpp:100] Creating Layer conv3_2
I0222 19:22:15.906270 10448 net.cpp:444] conv3_2 <- conv3_1
I0222 19:22:15.906270 10448 net.cpp:418] conv3_2 -> conv3_2
I0222 19:22:15.922269 10448 net.cpp:150] Setting up conv3_2
I0222 19:22:15.922269 10448 net.cpp:157] Top shape: 30 256 56 56 (24084480)
I0222 19:22:15.922269 10448 net.cpp:165] Memory required for data: 2763694440
I0222 19:22:15.922269 10448 layer_factory.hpp:77] Creating layer relu3_2
I0222 19:22:15.922269 10448 net.cpp:100] Creating Layer relu3_2
I0222 19:22:15.922269 10448 net.cpp:444] relu3_2 <- conv3_2
I0222 19:22:15.922269 10448 net.cpp:405] relu3_2 -> conv3_2 (in-place)
I0222 19:22:15.922269 10448 net.cpp:150] Setting up relu3_2
I0222 19:22:15.922269 10448 net.cpp:157] Top shape: 30 256 56 56 (24084480)
I0222 19:22:15.922269 10448 net.cpp:165] Memory required for data: 2860032360
I0222 19:22:15.922269 10448 layer_factory.hpp:77] Creating layer conv3_3
I0222 19:22:15.923270 10448 net.cpp:100] Creating Layer conv3_3
I0222 19:22:15.923270 10448 net.cpp:444] conv3_3 <- conv3_2
I0222 19:22:15.923270 10448 net.cpp:418] conv3_3 -> conv3_3
I0222 19:22:15.935271 10448 net.cpp:150] Setting up conv3_3
I0222 19:22:15.935271 10448 net.cpp:157] Top shape: 30 256 56 56 (24084480)
I0222 19:22:15.935271 10448 net.cpp:165] Memory required for data: 2956370280
I0222 19:22:15.935271 10448 layer_factory.hpp:77] Creating layer relu3_3
I0222 19:22:15.935271 10448 net.cpp:100] Creating Layer relu3_3
I0222 19:22:15.935271 10448 net.cpp:444] relu3_3 <- conv3_3
I0222 19:22:15.935271 10448 net.cpp:405] relu3_3 -> conv3_3 (in-place)
I0222 19:22:15.935271 10448 net.cpp:150] Setting up relu3_3
I0222 19:22:15.935271 10448 net.cpp:157] Top shape: 30 256 56 56 (24084480)
I0222 19:22:15.935271 10448 net.cpp:165] Memory required for data: 3052708200
I0222 19:22:15.935271 10448 layer_factory.hpp:77] Creating layer pool3
I0222 19:22:15.935271 10448 net.cpp:100] Creating Layer pool3
I0222 19:22:15.936270 10448 net.cpp:444] pool3 <- conv3_3
I0222 19:22:15.936270 10448 net.cpp:418] pool3 -> pool3
I0222 19:22:15.936270 10448 net.cpp:150] Setting up pool3
I0222 19:22:15.936270 10448 net.cpp:157] Top shape: 30 256 28 28 (6021120)
I0222 19:22:15.936270 10448 net.cpp:165] Memory required for data: 3076792680
I0222 19:22:15.936270 10448 layer_factory.hpp:77] Creating layer conv4_1
I0222 19:22:15.936270 10448 net.cpp:100] Creating Layer conv4_1
I0222 19:22:15.936270 10448 net.cpp:444] conv4_1 <- pool3
I0222 19:22:15.936270 10448 net.cpp:418] conv4_1 -> conv4_1
I0222 19:22:15.958272 10448 net.cpp:150] Setting up conv4_1
I0222 19:22:15.958272 10448 net.cpp:157] Top shape: 30 512 28 28 (12042240)
I0222 19:22:15.958272 10448 net.cpp:165] Memory required for data: 3124961640
I0222 19:22:15.958272 10448 layer_factory.hpp:77] Creating layer relu4_1
I0222 19:22:15.958272 10448 net.cpp:100] Creating Layer relu4_1
I0222 19:22:15.958272 10448 net.cpp:444] relu4_1 <- conv4_1
I0222 19:22:15.958272 10448 net.cpp:405] relu4_1 -> conv4_1 (in-place)
I0222 19:22:15.959272 10448 net.cpp:150] Setting up relu4_1
I0222 19:22:15.959272 10448 net.cpp:157] Top shape: 30 512 28 28 (12042240)
I0222 19:22:15.959272 10448 net.cpp:165] Memory required for data: 3173130600
I0222 19:22:15.959272 10448 layer_factory.hpp:77] Creating layer conv4_2
I0222 19:22:15.959272 10448 net.cpp:100] Creating Layer conv4_2
I0222 19:22:15.959272 10448 net.cpp:444] conv4_2 <- conv4_1
I0222 19:22:15.959272 10448 net.cpp:418] conv4_2 -> conv4_2
I0222 19:22:15.996274 10448 net.cpp:150] Setting up conv4_2
I0222 19:22:15.996274 10448 net.cpp:157] Top shape: 30 512 28 28 (12042240)
I0222 19:22:15.996274 10448 net.cpp:165] Memory required for data: 3221299560
I0222 19:22:15.996274 10448 layer_factory.hpp:77] Creating layer relu4_2
I0222 19:22:15.996274 10448 net.cpp:100] Creating Layer relu4_2
I0222 19:22:15.996274 10448 net.cpp:444] relu4_2 <- conv4_2
I0222 19:22:15.996274 10448 net.cpp:405] relu4_2 -> conv4_2 (in-place)
I0222 19:22:15.997274 10448 net.cpp:150] Setting up relu4_2
I0222 19:22:15.997274 10448 net.cpp:157] Top shape: 30 512 28 28 (12042240)
I0222 19:22:15.997274 10448 net.cpp:165] Memory required for data: 3269468520
I0222 19:22:15.997274 10448 layer_factory.hpp:77] Creating layer conv4_3
I0222 19:22:15.997274 10448 net.cpp:100] Creating Layer conv4_3
I0222 19:22:15.997274 10448 net.cpp:444] conv4_3 <- conv4_2
I0222 19:22:15.997274 10448 net.cpp:418] conv4_3 -> conv4_3
I0222 19:22:16.037276 10448 net.cpp:150] Setting up conv4_3
I0222 19:22:16.037276 10448 net.cpp:157] Top shape: 30 512 28 28 (12042240)
I0222 19:22:16.037276 10448 net.cpp:165] Memory required for data: 3317637480
I0222 19:22:16.037276 10448 layer_factory.hpp:77] Creating layer relu4_3
I0222 19:22:16.037276 10448 net.cpp:100] Creating Layer relu4_3
I0222 19:22:16.037276 10448 net.cpp:444] relu4_3 <- conv4_3
I0222 19:22:16.037276 10448 net.cpp:405] relu4_3 -> conv4_3 (in-place)
I0222 19:22:16.037276 10448 net.cpp:150] Setting up relu4_3
I0222 19:22:16.037276 10448 net.cpp:157] Top shape: 30 512 28 28 (12042240)
I0222 19:22:16.037276 10448 net.cpp:165] Memory required for data: 3365806440
I0222 19:22:16.037276 10448 layer_factory.hpp:77] Creating layer pool4
I0222 19:22:16.037276 10448 net.cpp:100] Creating Layer pool4
I0222 19:22:16.037276 10448 net.cpp:444] pool4 <- conv4_3
I0222 19:22:16.037276 10448 net.cpp:418] pool4 -> pool4
I0222 19:22:16.037276 10448 net.cpp:150] Setting up pool4
I0222 19:22:16.037276 10448 net.cpp:157] Top shape: 30 512 14 14 (3010560)
I0222 19:22:16.037276 10448 net.cpp:165] Memory required for data: 3377848680
I0222 19:22:16.037276 10448 layer_factory.hpp:77] Creating layer conv5_1
I0222 19:22:16.037276 10448 net.cpp:100] Creating Layer conv5_1
I0222 19:22:16.037276 10448 net.cpp:444] conv5_1 <- pool4
I0222 19:22:16.037276 10448 net.cpp:418] conv5_1 -> conv5_1
I0222 19:22:16.074278 10448 net.cpp:150] Setting up conv5_1
I0222 19:22:16.074278 10448 net.cpp:157] Top shape: 30 512 14 14 (3010560)
I0222 19:22:16.074278 10448 net.cpp:165] Memory required for data: 3389890920
I0222 19:22:16.074278 10448 layer_factory.hpp:77] Creating layer relu5_1
I0222 19:22:16.074278 10448 net.cpp:100] Creating Layer relu5_1
I0222 19:22:16.074278 10448 net.cpp:444] relu5_1 <- conv5_1
I0222 19:22:16.074278 10448 net.cpp:405] relu5_1 -> conv5_1 (in-place)
I0222 19:22:16.074278 10448 net.cpp:150] Setting up relu5_1
I0222 19:22:16.074278 10448 net.cpp:157] Top shape: 30 512 14 14 (3010560)
I0222 19:22:16.074278 10448 net.cpp:165] Memory required for data: 3401933160
I0222 19:22:16.074278 10448 layer_factory.hpp:77] Creating layer conv5_2
I0222 19:22:16.074278 10448 net.cpp:100] Creating Layer conv5_2
I0222 19:22:16.074278 10448 net.cpp:444] conv5_2 <- conv5_1
I0222 19:22:16.074278 10448 net.cpp:418] conv5_2 -> conv5_2
I0222 19:22:16.125282 10448 net.cpp:150] Setting up conv5_2
I0222 19:22:16.125282 10448 net.cpp:157] Top shape: 30 512 14 14 (3010560)
I0222 19:22:16.125282 10448 net.cpp:165] Memory required for data: 3413975400
I0222 19:22:16.125282 10448 layer_factory.hpp:77] Creating layer relu5_2
I0222 19:22:16.125282 10448 net.cpp:100] Creating Layer relu5_2
I0222 19:22:16.125282 10448 net.cpp:444] relu5_2 <- conv5_2
I0222 19:22:16.125282 10448 net.cpp:405] relu5_2 -> conv5_2 (in-place)
I0222 19:22:16.126281 10448 net.cpp:150] Setting up relu5_2
I0222 19:22:16.126281 10448 net.cpp:157] Top shape: 30 512 14 14 (3010560)
I0222 19:22:16.126281 10448 net.cpp:165] Memory required for data: 3426017640
I0222 19:22:16.126281 10448 layer_factory.hpp:77] Creating layer conv5_3
I0222 19:22:16.126281 10448 net.cpp:100] Creating Layer conv5_3
I0222 19:22:16.126281 10448 net.cpp:444] conv5_3 <- conv5_2
I0222 19:22:16.126281 10448 net.cpp:418] conv5_3 -> conv5_3
I0222 19:22:16.172284 10448 net.cpp:150] Setting up conv5_3
I0222 19:22:16.172284 10448 net.cpp:157] Top shape: 30 512 14 14 (3010560)
I0222 19:22:16.172284 10448 net.cpp:165] Memory required for data: 3438059880
I0222 19:22:16.172284 10448 layer_factory.hpp:77] Creating layer relu5_3
I0222 19:22:16.172284 10448 net.cpp:100] Creating Layer relu5_3
I0222 19:22:16.172284 10448 net.cpp:444] relu5_3 <- conv5_3
I0222 19:22:16.173285 10448 net.cpp:405] relu5_3 -> conv5_3 (in-place)
I0222 19:22:16.173285 10448 net.cpp:150] Setting up relu5_3
I0222 19:22:16.174284 10448 net.cpp:157] Top shape: 30 512 14 14 (3010560)
I0222 19:22:16.174284 10448 net.cpp:165] Memory required for data: 3450102120
I0222 19:22:16.174284 10448 layer_factory.hpp:77] Creating layer pool5
I0222 19:22:16.174284 10448 net.cpp:100] Creating Layer pool5
I0222 19:22:16.174284 10448 net.cpp:444] pool5 <- conv5_3
I0222 19:22:16.174284 10448 net.cpp:418] pool5 -> pool5
I0222 19:22:16.174284 10448 net.cpp:150] Setting up pool5
I0222 19:22:16.174284 10448 net.cpp:157] Top shape: 30 512 7 7 (752640)
I0222 19:22:16.174284 10448 net.cpp:165] Memory required for data: 3453112680
I0222 19:22:16.174284 10448 layer_factory.hpp:77] Creating layer fc6
I0222 19:22:16.174284 10448 net.cpp:100] Creating Layer fc6
I0222 19:22:16.174284 10448 net.cpp:444] fc6 <- pool5
I0222 19:22:16.174284 10448 net.cpp:418] fc6 -> fc6
I0222 19:22:17.869381 10448 net.cpp:150] Setting up fc6
I0222 19:22:17.869381 10448 net.cpp:157] Top shape: 30 4096 (122880)
I0222 19:22:17.869381 10448 net.cpp:165] Memory required for data: 3453604200
I0222 19:22:17.869381 10448 layer_factory.hpp:77] Creating layer relu6
I0222 19:22:17.869381 10448 net.cpp:100] Creating Layer relu6
I0222 19:22:17.869381 10448 net.cpp:444] relu6 <- fc6
I0222 19:22:17.869381 10448 net.cpp:405] relu6 -> fc6 (in-place)
I0222 19:22:17.871381 10448 net.cpp:150] Setting up relu6
I0222 19:22:17.871381 10448 net.cpp:157] Top shape: 30 4096 (122880)
I0222 19:22:17.871381 10448 net.cpp:165] Memory required for data: 3454095720
I0222 19:22:17.871381 10448 layer_factory.hpp:77] Creating layer drop6
I0222 19:22:17.871381 10448 net.cpp:100] Creating Layer drop6
I0222 19:22:17.871381 10448 net.cpp:444] drop6 <- fc6
I0222 19:22:17.871381 10448 net.cpp:405] drop6 -> fc6 (in-place)
I0222 19:22:17.871381 10448 net.cpp:150] Setting up drop6
I0222 19:22:17.871381 10448 net.cpp:157] Top shape: 30 4096 (122880)
I0222 19:22:17.871381 10448 net.cpp:165] Memory required for data: 3454587240
I0222 19:22:17.871381 10448 layer_factory.hpp:77] Creating layer fc7
I0222 19:22:17.871381 10448 net.cpp:100] Creating Layer fc7
I0222 19:22:17.871381 10448 net.cpp:444] fc7 <- fc6
I0222 19:22:17.871381 10448 net.cpp:418] fc7 -> fc7
I0222 19:22:18.127396 10448 net.cpp:150] Setting up fc7
I0222 19:22:18.127396 10448 net.cpp:157] Top shape: 30 4096 (122880)
I0222 19:22:18.127396 10448 net.cpp:165] Memory required for data: 3455078760
I0222 19:22:18.127396 10448 layer_factory.hpp:77] Creating layer relu7
I0222 19:22:18.127396 10448 net.cpp:100] Creating Layer relu7
I0222 19:22:18.127396 10448 net.cpp:444] relu7 <- fc7
I0222 19:22:18.127396 10448 net.cpp:405] relu7 -> fc7 (in-place)
I0222 19:22:18.128396 10448 net.cpp:150] Setting up relu7
I0222 19:22:18.128396 10448 net.cpp:157] Top shape: 30 4096 (122880)
I0222 19:22:18.128396 10448 net.cpp:165] Memory required for data: 3455570280
I0222 19:22:18.128396 10448 layer_factory.hpp:77] Creating layer drop7
I0222 19:22:18.128396 10448 net.cpp:100] Creating Layer drop7
I0222 19:22:18.128396 10448 net.cpp:444] drop7 <- fc7
I0222 19:22:18.128396 10448 net.cpp:405] drop7 -> fc7 (in-place)
I0222 19:22:18.128396 10448 net.cpp:150] Setting up drop7
I0222 19:22:18.128396 10448 net.cpp:157] Top shape: 30 4096 (122880)
I0222 19:22:18.128396 10448 net.cpp:165] Memory required for data: 3456061800
I0222 19:22:18.128396 10448 layer_factory.hpp:77] Creating layer fc8_my
I0222 19:22:18.128396 10448 net.cpp:100] Creating Layer fc8_my
I0222 19:22:18.128396 10448 net.cpp:444] fc8_my <- fc7
I0222 19:22:18.128396 10448 net.cpp:418] fc8_my -> fc8_my
I0222 19:22:18.128396 10448 net.cpp:150] Setting up fc8_my
I0222 19:22:18.128396 10448 net.cpp:157] Top shape: 30 2 (60)
I0222 19:22:18.128396 10448 net.cpp:165] Memory required for data: 3456062040
I0222 19:22:18.128396 10448 layer_factory.hpp:77] Creating layer fc8_my_fc8_my_0_split
I0222 19:22:18.128396 10448 net.cpp:100] Creating Layer fc8_my_fc8_my_0_split
I0222 19:22:18.128396 10448 net.cpp:444] fc8_my_fc8_my_0_split <- fc8_my
I0222 19:22:18.128396 10448 net.cpp:418] fc8_my_fc8_my_0_split -> fc8_my_fc8_my_0_split_0
I0222 19:22:18.128396 10448 net.cpp:418] fc8_my_fc8_my_0_split -> fc8_my_fc8_my_0_split_1
I0222 19:22:18.128396 10448 net.cpp:150] Setting up fc8_my_fc8_my_0_split
I0222 19:22:18.128396 10448 net.cpp:157] Top shape: 30 2 (60)
I0222 19:22:18.128396 10448 net.cpp:157] Top shape: 30 2 (60)
I0222 19:22:18.128396 10448 net.cpp:165] Memory required for data: 3456062520
I0222 19:22:18.128396 10448 layer_factory.hpp:77] Creating layer accuracy_at_1
I0222 19:22:18.128396 10448 net.cpp:100] Creating Layer accuracy_at_1
I0222 19:22:18.128396 10448 net.cpp:444] accuracy_at_1 <- fc8_my_fc8_my_0_split_0
I0222 19:22:18.128396 10448 net.cpp:444] accuracy_at_1 <- label_data_1_split_0
I0222 19:22:18.128396 10448 net.cpp:418] accuracy_at_1 -> accuracy_at_1
I0222 19:22:18.128396 10448 net.cpp:150] Setting up accuracy_at_1
I0222 19:22:18.128396 10448 net.cpp:157] Top shape: (1)
I0222 19:22:18.128396 10448 net.cpp:165] Memory required for data: 3456062524
I0222 19:22:18.128396 10448 layer_factory.hpp:77] Creating layer loss
I0222 19:22:18.128396 10448 net.cpp:100] Creating Layer loss
I0222 19:22:18.128396 10448 net.cpp:444] loss <- fc8_my_fc8_my_0_split_1
I0222 19:22:18.128396 10448 net.cpp:444] loss <- label_data_1_split_1
I0222 19:22:18.128396 10448 net.cpp:418] loss -> loss
I0222 19:22:18.128396 10448 layer_factory.hpp:77] Creating layer loss
I0222 19:22:18.129396 10448 net.cpp:150] Setting up loss
I0222 19:22:18.129396 10448 net.cpp:157] Top shape: (1)
I0222 19:22:18.129396 10448 net.cpp:160]     with loss weight 1
I0222 19:22:18.129396 10448 net.cpp:165] Memory required for data: 3456062528
I0222 19:22:18.129396 10448 net.cpp:226] loss needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:228] accuracy_at_1 does not need backward computation.
I0222 19:22:18.129396 10448 net.cpp:226] fc8_my_fc8_my_0_split needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:226] fc8_my needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:226] drop7 needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:226] relu7 needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:226] fc7 needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:226] drop6 needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:226] relu6 needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:226] fc6 needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:226] pool5 needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:226] relu5_3 needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:226] conv5_3 needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:226] relu5_2 needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:226] conv5_2 needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:226] relu5_1 needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:226] conv5_1 needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:226] pool4 needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:226] relu4_3 needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:226] conv4_3 needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:226] relu4_2 needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:226] conv4_2 needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:226] relu4_1 needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:226] conv4_1 needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:226] pool3 needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:226] relu3_3 needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:226] conv3_3 needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:226] relu3_2 needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:226] conv3_2 needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:226] relu3_1 needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:226] conv3_1 needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:226] pool2 needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:226] relu2_2 needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:226] conv2_2 needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:226] relu2_1 needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:226] conv2_1 needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:226] pool1 needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:226] relu1_2 needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:226] conv1_2 needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:226] relu1_1 needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:226] conv1_1 needs backward computation.
I0222 19:22:18.129396 10448 net.cpp:228] label_data_1_split does not need backward computation.
I0222 19:22:18.129396 10448 net.cpp:228] data does not need backward computation.
I0222 19:22:18.129396 10448 net.cpp:270] This network produces output accuracy_at_1
I0222 19:22:18.130396 10448 net.cpp:270] This network produces output loss
I0222 19:22:18.130396 10448 net.cpp:283] Network initialization done.
I0222 19:22:18.130396 10448 solver.cpp:60] Solver scaffolding done.
I0222 19:22:18.131397 10448 caffe.cpp:155] Finetuning from E:\CAFFE\caffe-master\models\vgg_16\VGG_ILSVRC_16_layers.caffemodel
[libprotobuf WARNING ..\src\google\protobuf\io\coded_stream.cc:537] Reading dangerously large protocol message.  If the message turns out to be larger than 2147483647 bytes, parsing will be halted for security reasons.  To increase the limit (or to disable these warnings), see CodedInputStream::SetTotalBytesLimit() in google/protobuf/io/coded_stream.h.
[libprotobuf WARNING ..\src\google\protobuf\io\coded_stream.cc:78] The total number of bytes read was 553432081
I0222 19:22:20.907555 10448 upgrade_proto.cpp:52] Attempting to upgrade input file specified using deprecated V1LayerParameter: E:\CAFFE\caffe-master\models\vgg_16\VGG_ILSVRC_16_layers.caffemodel
I0222 19:22:21.498589 10448 upgrade_proto.cpp:60] Successfully upgraded file specified using deprecated V1LayerParameter
I0222 19:22:21.539592 10448 upgrade_proto.cpp:66] Attempting to upgrade input file specified using deprecated input fields: E:\CAFFE\caffe-master\models\vgg_16\VGG_ILSVRC_16_layers.caffemodel
I0222 19:22:21.540591 10448 upgrade_proto.cpp:69] Successfully upgraded file specified using deprecated input fields.
W0222 19:22:21.540591 10448 upgrade_proto.cpp:71] Note that future Caffe releases will only support input layers and not input fields.
I0222 19:22:21.541591 10448 net.cpp:774] Copying source layer conv1_1
I0222 19:22:21.541591 10448 net.cpp:774] Copying source layer relu1_1
I0222 19:22:21.541591 10448 net.cpp:774] Copying source layer conv1_2
I0222 19:22:21.541591 10448 net.cpp:774] Copying source layer relu1_2
I0222 19:22:21.541591 10448 net.cpp:774] Copying source layer pool1
I0222 19:22:21.541591 10448 net.cpp:774] Copying source layer conv2_1
I0222 19:22:21.542592 10448 net.cpp:774] Copying source layer relu2_1
I0222 19:22:21.542592 10448 net.cpp:774] Copying source layer conv2_2
I0222 19:22:21.542592 10448 net.cpp:774] Copying source layer relu2_2
I0222 19:22:21.542592 10448 net.cpp:774] Copying source layer pool2
I0222 19:22:21.542592 10448 net.cpp:774] Copying source layer conv3_1
I0222 19:22:21.542592 10448 net.cpp:774] Copying source layer relu3_1
I0222 19:22:21.542592 10448 net.cpp:774] Copying source layer conv3_2
I0222 19:22:21.543591 10448 net.cpp:774] Copying source layer relu3_2
I0222 19:22:21.543591 10448 net.cpp:774] Copying source layer conv3_3
I0222 19:22:21.544591 10448 net.cpp:774] Copying source layer relu3_3
I0222 19:22:21.544591 10448 net.cpp:774] Copying source layer pool3
I0222 19:22:21.544591 10448 net.cpp:774] Copying source layer conv4_1
I0222 19:22:21.546591 10448 net.cpp:774] Copying source layer relu4_1
I0222 19:22:21.546591 10448 net.cpp:774] Copying source layer conv4_2
I0222 19:22:21.549592 10448 net.cpp:774] Copying source layer relu4_2
I0222 19:22:21.549592 10448 net.cpp:774] Copying source layer conv4_3
I0222 19:22:21.553592 10448 net.cpp:774] Copying source layer relu4_3
I0222 19:22:21.554592 10448 net.cpp:774] Copying source layer pool4
I0222 19:22:21.554592 10448 net.cpp:774] Copying source layer conv5_1
I0222 19:22:21.557592 10448 net.cpp:774] Copying source layer relu5_1
I0222 19:22:21.557592 10448 net.cpp:774] Copying source layer conv5_2
I0222 19:22:21.561592 10448 net.cpp:774] Copying source layer relu5_2
I0222 19:22:21.561592 10448 net.cpp:774] Copying source layer conv5_3
I0222 19:22:21.564592 10448 net.cpp:774] Copying source layer relu5_3
I0222 19:22:21.564592 10448 net.cpp:774] Copying source layer pool5
I0222 19:22:21.564592 10448 net.cpp:774] Copying source layer fc6
I0222 19:22:21.666599 10448 net.cpp:774] Copying source layer relu6
I0222 19:22:21.666599 10448 net.cpp:774] Copying source layer drop6
I0222 19:22:21.666599 10448 net.cpp:774] Copying source layer fc7
I0222 19:22:21.684599 10448 net.cpp:774] Copying source layer relu7
I0222 19:22:21.684599 10448 net.cpp:774] Copying source layer drop7
I0222 19:22:21.684599 10448 net.cpp:771] Ignoring source layer fc8
I0222 19:22:21.684599 10448 net.cpp:771] Ignoring source layer prob
[libprotobuf WARNING ..\src\google\protobuf\io\coded_stream.cc:537] Reading dangerously large protocol message.  If the message turns out to be larger than 2147483647 bytes, parsing will be halted for security reasons.  To increase the limit (or to disable these warnings), see CodedInputStream::SetTotalBytesLimit() in google/protobuf/io/coded_stream.h.
[libprotobuf WARNING ..\src\google\protobuf\io\coded_stream.cc:78] The total number of bytes read was 553432081
I0222 19:22:22.763661 10448 upgrade_proto.cpp:52] Attempting to upgrade input file specified using deprecated V1LayerParameter: E:\CAFFE\caffe-master\models\vgg_16\VGG_ILSVRC_16_layers.caffemodel
I0222 19:22:23.351694 10448 upgrade_proto.cpp:60] Successfully upgraded file specified using deprecated V1LayerParameter
I0222 19:22:23.396697 10448 upgrade_proto.cpp:66] Attempting to upgrade input file specified using deprecated input fields: E:\CAFFE\caffe-master\models\vgg_16\VGG_ILSVRC_16_layers.caffemodel
I0222 19:22:23.396697 10448 upgrade_proto.cpp:69] Successfully upgraded file specified using deprecated input fields.
W0222 19:22:23.396697 10448 upgrade_proto.cpp:71] Note that future Caffe releases will only support input layers and not input fields.
I0222 19:22:23.397697 10448 net.cpp:774] Copying source layer conv1_1
I0222 19:22:23.397697 10448 net.cpp:774] Copying source layer relu1_1
I0222 19:22:23.397697 10448 net.cpp:774] Copying source layer conv1_2
I0222 19:22:23.397697 10448 net.cpp:774] Copying source layer relu1_2
I0222 19:22:23.397697 10448 net.cpp:774] Copying source layer pool1
I0222 19:22:23.397697 10448 net.cpp:774] Copying source layer conv2_1
I0222 19:22:23.397697 10448 net.cpp:774] Copying source layer relu2_1
I0222 19:22:23.397697 10448 net.cpp:774] Copying source layer conv2_2
I0222 19:22:23.397697 10448 net.cpp:774] Copying source layer relu2_2
I0222 19:22:23.397697 10448 net.cpp:774] Copying source layer pool2
I0222 19:22:23.397697 10448 net.cpp:774] Copying source layer conv3_1
I0222 19:22:23.398697 10448 net.cpp:774] Copying source layer relu3_1
I0222 19:22:23.398697 10448 net.cpp:774] Copying source layer conv3_2
I0222 19:22:23.398697 10448 net.cpp:774] Copying source layer relu3_2
I0222 19:22:23.399698 10448 net.cpp:774] Copying source layer conv3_3
I0222 19:22:23.399698 10448 net.cpp:774] Copying source layer relu3_3
I0222 19:22:23.399698 10448 net.cpp:774] Copying source layer pool3
I0222 19:22:23.399698 10448 net.cpp:774] Copying source layer conv4_1
I0222 19:22:23.401697 10448 net.cpp:774] Copying source layer relu4_1
I0222 19:22:23.401697 10448 net.cpp:774] Copying source layer conv4_2
I0222 19:22:23.405697 10448 net.cpp:774] Copying source layer relu4_2
I0222 19:22:23.405697 10448 net.cpp:774] Copying source layer conv4_3
I0222 19:22:23.408699 10448 net.cpp:774] Copying source layer relu4_3
I0222 19:22:23.409698 10448 net.cpp:774] Copying source layer pool4
I0222 19:22:23.409698 10448 net.cpp:774] Copying source layer conv5_1
I0222 19:22:23.412698 10448 net.cpp:774] Copying source layer relu5_1
I0222 19:22:23.412698 10448 net.cpp:774] Copying source layer conv5_2
I0222 19:22:23.415699 10448 net.cpp:774] Copying source layer relu5_2
I0222 19:22:23.416698 10448 net.cpp:774] Copying source layer conv5_3
I0222 19:22:23.419698 10448 net.cpp:774] Copying source layer relu5_3
I0222 19:22:23.419698 10448 net.cpp:774] Copying source layer pool5
I0222 19:22:23.419698 10448 net.cpp:774] Copying source layer fc6
I0222 19:22:23.526705 10448 net.cpp:774] Copying source layer relu6
I0222 19:22:23.526705 10448 net.cpp:774] Copying source layer drop6
I0222 19:22:23.526705 10448 net.cpp:774] Copying source layer fc7
I0222 19:22:23.542706 10448 net.cpp:774] Copying source layer relu7
I0222 19:22:23.542706 10448 net.cpp:774] Copying source layer drop7
I0222 19:22:23.542706 10448 net.cpp:771] Ignoring source layer fc8
I0222 19:22:23.542706 10448 net.cpp:771] Ignoring source layer prob
I0222 19:22:23.625710 10448 caffe.cpp:252] Starting Optimization
I0222 19:22:23.625710 10448 solver.cpp:279] Solving VGG16
I0222 19:22:23.625710 10448 solver.cpp:280] Learning Rate Policy: step
I0222 19:22:23.686714 10448 solver.cpp:337] Iteration 0, Testing net (#0)
I0222 19:23:36.112856 10448 solver.cpp:404]     Test net output #0: accuracy_at_1 = 0.460467
I0222 19:23:36.112856 10448 solver.cpp:404]     Test net output #1: loss = 0.739445 (* 1 = 0.739445 loss)
I0222 19:23:36.975905 10448 solver.cpp:228] Iteration 0, loss = 0.665385
I0222 19:23:36.975905 10448 solver.cpp:244]     Train net output #0: loss = 0.665385 (* 1 = 0.665385 loss)
  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 1
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论 1
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值