Blob是caffe基本的数据单元;Layer是基本的计算单元(包含前向反向传播算法等);Net则代表完整的CNN模型!
Net对应的描述符文件为 *.prototxt !
编写测试代码
#include <vector>
#include <iostream>
#include <caffe/net.hpp>
using namespace caffe;
using namespace std;
int main(void)
{
//获取网络描述
std::string proto("deploy.prototxt");
Net<float> nn(proto, caffe::TEST); //实例化对象
vector<string> bn = nn.blob_names();//获取net中的所有blob对象名
cout<<"#### Blob Names #####"<<endl;
for(int i = 0; i < bn.size(); i++)
{
cout<<"Blob #"<<i<<" : "<<bn[i]<<endl;
}
vector<string> ln = nn.layer_names();
cout<<"#### Layer Names #####"<<endl;
for(int i = 0; i < ln.size(); i++)
{
cout<<"Layer #"<<i<<" : "<<ln[i]<<endl;
}
return 0;
}
结论:Net中既包括Layer对象,又包括Blob对象。Blob对象用于存放Layer输入/输出的中间结果,Layer根据Net的表述对指定的输入Blob进行某些计算处理(卷积,下采样,全连接,非线性变换,计算代价函数等),输出结果放到指定的Blob中。输入Blob和输出Blob可能为同一个;
可以通过has_blob(),has_layer()函数查询当前Net对象是否包含指定名字的Blob或者Layer对象,如果返回值为真,则可以进一步调用blob_by_name(),layer_by_name()函数直接获取相应的Blob或Layer的指针,进行进一步操作(提取某层计算出的特征或者某个Blob的权值!)
g++ -o netapp net_demo.cpp -I /home/zhao/ming/caffe0/caffe/include/ -D CPU_ONLY -I /home/zhao/ming/caffe0/caffe/.build_release/src/ -L /home/zhao/ming/caffe0/caffe/build/lib/ -lcaffe -lglog -lboost_system -lprotobuf
Caffe自带的CaffeNet模型
# CaffeNet (AlexNet-style) inference network in Caffe prototxt format.
# Data flow: input image -> 5 convolution stages (with ReLU / max-pooling /
# LRN) -> 3 fully-connected layers (with ReLU + dropout) -> softmax.
name: "CaffeNet"
# Input placeholder: batch of 10, 3 channels, 227x227 pixels.
layer {
name: "data"
type: "Input"
top: "data"
input_param { shape: { dim: 10 dim: 3 dim: 227 dim: 227 } }
}
# conv1: 96 filters, 11x11 kernel, stride 4.
layer {
name: "conv1"
type: "Convolution"
bottom: "data"
top: "conv1"
convolution_param {
num_output: 96
kernel_size: 11
stride: 4
}
}
# In-place ReLU (bottom == top, overwrites conv1's output blob).
layer {
name: "relu1"
type: "ReLU"
bottom: "conv1"
top: "conv1"
}
# 3x3 max pooling, stride 2.
layer {
name: "pool1"
type: "Pooling"
bottom: "conv1"
top: "pool1"
pooling_param {
pool: MAX
kernel_size: 3
stride: 2
}
}
# Local Response Normalization across 5 neighboring channels.
layer {
name: "norm1"
type: "LRN"
bottom: "pool1"
top: "norm1"
lrn_param {
local_size: 5
alpha: 0.0001
beta: 0.75
}
}
# conv2: 256 filters, 5x5 kernel, pad 2; group: 2 splits input/output
# channels into two independent halves (AlexNet's two-GPU heritage).
layer {
name: "conv2"
type: "Convolution"
bottom: "norm1"
top: "conv2"
convolution_param {
num_output: 256
pad: 2
kernel_size: 5
group: 2
}
}
layer {
name: "relu2"
type: "ReLU"
bottom: "conv2"
top: "conv2"
}
layer {
name: "pool2"
type: "Pooling"
bottom: "conv2"
top: "pool2"
pooling_param {
pool: MAX
kernel_size: 3
stride: 2
}
}
layer {
name: "norm2"
type: "LRN"
bottom: "pool2"
top: "norm2"
lrn_param {
local_size: 5
alpha: 0.0001
beta: 0.75
}
}
# conv3: 384 filters, 3x3 kernel, pad 1 (no grouping).
layer {
name: "conv3"
type: "Convolution"
bottom: "norm2"
top: "conv3"
convolution_param {
num_output: 384
pad: 1
kernel_size: 3
}
}
layer {
name: "relu3"
type: "ReLU"
bottom: "conv3"
top: "conv3"
}
# conv4: 384 filters, 3x3 kernel, pad 1, grouped.
layer {
name: "conv4"
type: "Convolution"
bottom: "conv3"
top: "conv4"
convolution_param {
num_output: 384
pad: 1
kernel_size: 3
group: 2
}
}
layer {
name: "relu4"
type: "ReLU"
bottom: "conv4"
top: "conv4"
}
# conv5: 256 filters, 3x3 kernel, pad 1, grouped.
layer {
name: "conv5"
type: "Convolution"
bottom: "conv4"
top: "conv5"
convolution_param {
num_output: 256
pad: 1
kernel_size: 3
group: 2
}
}
layer {
name: "relu5"
type: "ReLU"
bottom: "conv5"
top: "conv5"
}
layer {
name: "pool5"
type: "Pooling"
bottom: "conv5"
top: "pool5"
pooling_param {
pool: MAX
kernel_size: 3
stride: 2
}
}
# fc6: first fully-connected layer, 4096 outputs.
layer {
name: "fc6"
type: "InnerProduct"
bottom: "pool5"
top: "fc6"
inner_product_param {
num_output: 4096
}
}
layer {
name: "relu6"
type: "ReLU"
bottom: "fc6"
top: "fc6"
}
# Dropout with ratio 0.5 (NOTE: in Caffe, Dropout is a pass-through at
# TEST/inference phase — it only drops activations during training).
layer {
name: "drop6"
type: "Dropout"
bottom: "fc6"
top: "fc6"
dropout_param {
dropout_ratio: 0.5
}
}
# fc7: second fully-connected layer, 4096 outputs.
layer {
name: "fc7"
type: "InnerProduct"
bottom: "fc6"
top: "fc7"
inner_product_param {
num_output: 4096
}
}
layer {
name: "relu7"
type: "ReLU"
bottom: "fc7"
top: "fc7"
}
layer {
name: "drop7"
type: "Dropout"
bottom: "fc7"
top: "fc7"
dropout_param {
dropout_ratio: 0.5
}
}
# fc8: final fully-connected layer, 1000 outputs (class scores).
layer {
name: "fc8"
type: "InnerProduct"
bottom: "fc7"
top: "fc8"
inner_product_param {
num_output: 1000
}
}
# Softmax converts fc8 scores into a probability distribution "prob".
layer {
name: "prob"
type: "Softmax"
bottom: "fc8"
top: "prob"
}