[C++ 基于Eigen库实现CRN前向推理]
第三部分:TransposedConv2d实现 (含dilation)
1. 基于Eigen实现BatchNorm2d
1.1 Layer_BatchNorm2d.h
#ifndef CRN_LAYER_BATCHNORM2D_H
#define CRN_LAYER_BATCHNORM2D_H
#include "Eigen"
#include "mat.h"
#include "Eigen/CXX11/Tensor"
// Inference-only BatchNorm2d layer for the CRN forward pass.
// Holds per-channel affine parameters (weight/bias) and running statistics
// loaded from a MATLAB .mat state file; no training-time statistics update.
class Layer_BatchNorm2d {
public:
    // Default-constructs a layer with channels == 1 (see the .cpp).
    Layer_BatchNorm2d();
    // Constructs a layer for `bn_ch` channels; parameters are loaded later
    // via LoadState()/LoadTestState().
    Layer_BatchNorm2d(int64_t bn_ch);
    // Loads weight/bias/running_mean/running_var/num_batches_tracked from the
    // open MAT-file `pmFile`; variable names are formed as
    // state_preffix + "_weight", "_bias", etc. (note: "preffix" [sic]).
    void LoadState(MATFile *pmFile, const std::string &state_preffix);
    // Fills the parameters with fixed test values (no file needed).
    void LoadTestState();
    // Applies batch normalization to a rank-4 input tensor and returns the
    // result. NOTE(review): assumed NCHW layout with per-channel params —
    // confirm against the forward() implementation, which is not in this chunk.
    Eigen::Tensor<float_t, 4> forward(Eigen::Tensor<float_t, 4> &input);
private:
    int64_t channels;                       // number of normalized channels
    Eigen::Tensor<float_t, 2> weights;      // per-channel scale (gamma)
    Eigen::Tensor<float_t, 2> bias;         // per-channel shift (beta)
    Eigen::Tensor<float_t, 2> running_mean; // running mean from training
    Eigen::Tensor<float_t, 2> running_var;  // running variance from training
    int32_t num_batches_tracked;            // loaded for completeness; unused at inference
};
#endif
1.2 Layer_BatchNorm2d.cpp
#include "iostream"
#include "../include/Layer_BatchNorm2d.h"
// Default constructor: a single-channel layer until LoadState() is called.
Layer_BatchNorm2d::Layer_BatchNorm2d() : channels(1) {}
// Construct a layer sized for the given channel count; parameters themselves
// are populated later by LoadState()/LoadTestState().
Layer_BatchNorm2d::Layer_BatchNorm2d(int64_t bn_ch) : channels(bn_ch) {}
void Layer_BatchNorm2d::LoadState(MATFile *pmFile, const std::string &state_preffix) {
std::string weight_name = state_preffix + "_weight";
std::string bias_name = state_preffix + "_bias";
std::string rm_name = state_preffix + "_running_mean";
std::string rv_name = state_preffix + "_running_var";
std::string nbt_name = state_preffix + "_num_batches_tracked";
mxArray *pa = matGetVariable(pmFile, weight_name.c_str());
auto *values = (float_t *) mxGetData(pa);
long long dim1 = mxGetM(pa);
long long dim2 = mxGetN(pa);
this->weights.resize(dim1, dim2);
int idx = 0;
for (int i = 0; i < dim2; i++) {
for (int j = 0; j < dim1; j++) {
this->weights(j, i) =<