Loading data into Caffe from memory with MemoryDataLayer

I've been working with Caffe applications lately. We often need to run feature extraction on a server, which means pushing a single in-memory image through the network in one pass. That raises the question of how to load data into Caffe directly from memory. Here is the code first:


#include <boost/make_shared.hpp>


// these need to be included after boost on OS X
#include <string>   // NOLINT(build/include_order)
#include <vector>   // NOLINT(build/include_order)
#include <fstream>  // NOLINT
#include <iostream>
#include <stdexcept>


#include "caffe/caffe.hpp"
#include <opencv2/opencv.hpp>


static void CheckFile(const std::string& filename) {
	std::ifstream f(filename.c_str());
	if (!f.good()) {
		f.close();
		throw std::runtime_error("Could not open file " + filename);
	}
	f.close();
}




template <typename Dtype>
caffe::Net<Dtype>* Net_Init_Load(
	std::string param_file, std::string pretrained_param_file, caffe::Phase phase)
{
	CheckFile(param_file);
	CheckFile(pretrained_param_file);


	caffe::Net<Dtype>* net(new caffe::Net<Dtype>(param_file, phase));
	// CopyTrainedLayersFrom takes only the weights file in stock Caffe
	net->CopyTrainedLayersFrom(pretrained_param_file);
	return net;
}
#define NetF float
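// Note (my addition, not in the original post): depending on your build, you
// may want to select CPU or GPU mode before constructing the net, e.g.
//   caffe::Caffe::set_mode(caffe::Caffe::CPU);
// or, for a CUDA build:
//   caffe::Caffe::SetDevice(0);
//   caffe::Caffe::set_mode(caffe::Caffe::GPU);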




int main()
{
	cv::Mat src1;
	src1 = cv::imread("test.png");


	cv::Mat rszimage;


	// The network expects 227x227 input, so resize the image to match
	cv::resize(src1, rszimage, cv::Size(227, 227));
	std::vector<cv::Mat> dv = { rszimage }; // image is a cv::Mat, as I'm using #1416
	std::vector<int> dvl = { 0 };
	
	// (Unused below) ReadFileToDatum is the file-based alternative to AddMatVector
	caffe::Datum data;
	caffe::ReadFileToDatum("D:/work/DestImage/crop/CH0005-00-0019/00028.png", &data);


	caffe::Net<NetF>* _net = Net_Init_Load<NetF>("deploy_Test.prototxt", "bvlc_alexnet.caffemodel", caffe::TEST);
	// Layer 0 is the MemoryDataLayer declared first in the prototxt
	caffe::MemoryDataLayer<NetF>* m_layer_ = (caffe::MemoryDataLayer<NetF>*)_net->layers()[0].get();
	m_layer_->AddMatVector(dv, dvl);
	
	// ForwardPrefilled() is deprecated; Forward() consumes the queued data
	std::vector<caffe::Blob<NetF>*> input_vec;
	_net->Forward(input_vec);
	boost::shared_ptr<caffe::Blob<NetF>> outPool5 = _net->blob_by_name("pool5");
	std::cout << outPool5->shape()[0] << std::endl;
	std::cout << outPool5->shape()[1] << std::endl;
	std::cout << outPool5->shape()[2] << std::endl;
	std::cout << outPool5->shape()[3] << std::endl;


	std::cout << outPool5->num() << std::endl;
	std::cout << outPool5->channels() << std::endl;
	std::cout << outPool5->width() << std::endl;
	std::cout << outPool5->height() << std::endl;
	std::cout << outPool5->data_at(0, 0, 0, 0) << std::endl;
	std::cout << outPool5->data_at(0, 0, 1, 1) << std::endl;
	std::cout << outPool5->data_at(0, 95, 5, 5) << std::endl;


	// cpu_data() exposes the raw pool5 values for further processing
	const NetF* pstart = outPool5->cpu_data();
	std::cout << m_layer_->width() << std::endl;

	delete _net;
	return 0;
}
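
If you want the whole pool5 feature map rather than individual data_at() lookups, the blob's cpu_data() pointer can be copied into a flat vector. Here is a minimal sketch; the helper name extract_blob is my own, not part of the original code:

#include <string>
#include <vector>
#include "caffe/caffe.hpp"

// Hypothetical helper: flatten a named blob into a std::vector<float>.
// count() equals num * channels * height * width, so this copies the
// features of every image in the batch.
std::vector<float> extract_blob(caffe::Net<float>& net, const std::string& blob_name)
{
	boost::shared_ptr<caffe::Blob<float> > blob = net.blob_by_name(blob_name);
	const float* begin = blob->cpu_data();
	return std::vector<float>(begin, begin + blob->count());
}

// Usage: std::vector<float> feat = extract_blob(*_net, "pool5");
// For AlexNet with a 227x227 input this yields 1 x 256 x 6 x 6 = 9216 values.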

Next, the network definition (prototxt):

name: "CaffeNet"


layers 
{
  name: "data"
  type: MEMORY_DATA
  top: "data"
  top: "label"
  memory_data_param 
  {
    batch_size: 1
    channels: 3
    height: 227
    width: 227
  }
  transform_param 
  {
    crop_size: 227
    mirror: false
    #mean_file: "imagenet_mean.binaryproto"
    mean_value: 104
    mean_value: 117
    mean_value: 123
  }
}


layers {
  name: "`"
  type: CONVOLUTION
  bottom: "data"       
  top: "conv1"         
  blobs_lr: 1
  blobs_lr: 2
  weight_decay: 1
  weight_decay: 0
  convolution_param {
    num_output: 96
    kernel_size: 11
    stride: 4
  }
}
layers {
  name: "relu1"
  type: RELU
  bottom: "conv1"      
  top: "conv1"         
}
layers {
  name: "pool1"
  type: POOLING
  bottom: "conv1"
  top: "pool1"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
layers {
  name: "norm1"
  type: LRN
  bottom: "pool1"
  top: "norm1"
  lrn_param {
    local_size: 5
    alpha: 0.0001
    beta: 0.75
  }
}
layers {
  name: "conv2"
  type: CONVOLUTION
  bottom: "norm1"
  top: "conv2"
  blobs_lr: 1
  blobs_lr: 2
  weight_decay: 1
  weight_decay: 0
  convolution_param {
    num_output: 256
    pad: 2
    kernel_size: 5
    group: 2
  }
}
layers {
  name: "relu2"
  type: RELU
  bottom: "conv2"
  top: "conv2"
}
layers {
  name: "pool2"
  type: POOLING
  bottom: "conv2"
  top: "pool2"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
layers {
  name: "norm2"
  type: LRN
  bottom: "pool2"
  top: "norm2"
  lrn_param {
    local_size: 5
    alpha: 0.0001
    beta: 0.75
  }
}
layers {
  name: "conv3"
  type: CONVOLUTION
  bottom: "norm2"
  top: "conv3"
  blobs_lr: 1
  blobs_lr: 2
  weight_decay: 1
  weight_decay: 0
  convolution_param {
    num_output: 384
    pad: 1
    kernel_size: 3
  }
}
layers {
  name: "relu3"
  type: RELU
  bottom: "conv3"
  top: "conv3"
}
layers {
  name: "conv4"
  type: CONVOLUTION
  bottom: "conv3"
  top: "conv4"
  blobs_lr: 1
  blobs_lr: 2
  weight_decay: 1
  weight_decay: 0
  convolution_param {
    num_output: 384
    pad: 1
    kernel_size: 3
    group: 2
  }
}
layers {
  name: "relu4"
  type: RELU
  bottom: "conv4"
  top: "conv4"
}
layers {
  name: "conv5"
  type: CONVOLUTION
  bottom: "conv4"
  top: "conv5"
  blobs_lr: 1
  blobs_lr: 2
  weight_decay: 1
  weight_decay: 0
  convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
    group: 2
  }
}
layers {
  name: "relu5"
  type: RELU
  bottom: "conv5"
  top: "conv5"
}
layers {
  name: "pool5"
  type: POOLING
  bottom: "conv5"
  top: "pool5"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
layers {
  name: "fc6"
  type: INNER_PRODUCT
  bottom: "pool5"
  top: "fc6"
  blobs_lr: 1
  blobs_lr: 2
  weight_decay: 1
  weight_decay: 0
  inner_product_param {
    num_output: 4096
  }
}
layers {
  name: "relu6"
  type: RELU
  bottom: "fc6"
  top: "fc6"
}
layers {
  name: "drop6"
  type: DROPOUT
  bottom: "fc6"
  top: "fc6"
  dropout_param {
    dropout_ratio: 0.5
  }
}
layers {
  name: "fc7"
  type: INNER_PRODUCT
  bottom: "fc6"
  top: "fc7"
  blobs_lr: 1
  blobs_lr: 2
  weight_decay: 1
  weight_decay: 0
  inner_product_param {
    num_output: 4096
  }
}
layers {
  name: "relu7"
  type: RELU
  bottom: "fc7"
  top: "fc7"
}
layers {
  name: "drop7"
  type: DROPOUT
  bottom: "fc7"
  top: "fc7"
  dropout_param {
    dropout_ratio: 0.5
  }
}
layers {
  name: "fc8"
  type: INNER_PRODUCT
  bottom: "fc7"
  top: "fc8"
  blobs_lr: 1
  blobs_lr: 2
  weight_decay: 1
  weight_decay: 0
  inner_product_param {
    num_output: 1000
  }
}

layers 
{
  name: "prob"
  type: SOFTMAX
  bottom: "fc8"
  top: "prob"
}

layers 
{
  name: "output"
  type: ARGMAX
  bottom: "prob"
  top: "output"
}

The model here is AlexNet, and the example extracts a single image's features at the pool5 layer. Following this pattern, you can use any Caffe model to extract features for any image.
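
One caveat: AddMatVector checks that the number of Mats you pass is a multiple of the batch_size declared in memory_data_param, so the single-image example above relies on batch_size: 1. Below is a hedged sketch of feeding four images at once, assuming the prototxt is changed to batch_size: 4 (the paths variable is illustrative, not from the original code):

std::vector<cv::Mat> batch;
std::vector<int> labels;
for (int i = 0; i < 4; ++i) {
	cv::Mat img = cv::imread(paths[i]);        // paths is hypothetical
	cv::resize(img, img, cv::Size(227, 227));  // must match width/height in the prototxt
	batch.push_back(img);
	labels.push_back(0);                       // dummy labels; unused for feature extraction
}
m_layer_->AddMatVector(batch, labels);         // queues all four images
_net->Forward(input_vec);                      // pool5 now holds 4 x 256 x 6 x 6 features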




