template <typename Dtype>
void BatchNormLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
    const vector<Blob<Dtype>*>& top) {
  // Pull this layer's batch-norm settings from the layer proto.
  const BatchNormParameter& bn_param = this->layer_param_.batch_norm_param();
  moving_average_fraction_ = bn_param.moving_average_fraction();
  // Default to the accumulated global statistics at test time; an explicit
  // use_global_stats setting in the proto overrides that default.
  use_global_stats_ = this->phase_ == TEST;
  if (bn_param.has_use_global_stats()) {
    use_global_stats_ = bn_param.use_global_stats();
  }
  // A 1-axis bottom blob has no channel axis; otherwise axis 1 is channels.
  channels_ = (bottom[0]->num_axes() == 1) ? 1 : bottom[0]->shape(1);
  eps_ = bn_param.eps();
  // Layer parameters: blobs_[0] holds the running mean (size channels_),
  // blobs_[1] the running variance (size channels_), and blobs_[2] the
  // scalar bias-correction factor (size 1). Reuse them if already present
  // (e.g. when loading a snapshot); otherwise allocate and zero-fill.
  if (!this->blobs_.empty()) {
    LOG(INFO) << "Skipping parameter initialization";
  } else {
    this->blobs_.resize(3);
    vector<int> param_shape(1, channels_);
    this->blobs_[0].reset(new Blob<Dtype>(param_shape));
    this->blobs_[1].reset(new Blob<Dtype>(param_shape));
    param_shape[0] = 1;
    this->blobs_[2].reset(new Blob<Dtype>(param_shape));
    for (int i = 0; i < 3; ++i) {
      caffe_set(this->blobs_[i]->count(), Dtype(0),
                this->blobs_[i]->mutable_cpu_data());
    }
  }
}
template <typename Dtype>
void BatchNormLayer<Dtype>::Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top) {
if (botto