# C++从零实现深度神经网络之壹——Net类的设计和神经网络的初始化

// Header prelude for Net.h.
// NOTE(review): the original #ifndef NET_H / #define NET_H guard was closed by
// #endif immediately after the #define, so it guarded nothing. #pragma once
// below already provides include-guard semantics, so the broken guard is removed.
#pragma once

#include <iostream>
#include <string>   // std::string parameter of activationFunction()
#include <vector>   // std::vector members of Net
// Forward slashes in include paths are portable (backslashes are Windows-only).
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
//#include<iomanip>

#include "Function.h"

namespace liu
{

// Fully-connected feed-forward neural network backed by cv::Mat storage.
class Net
{
public:
	// Number of neurons in each layer, e.g. {784, 100, 10}.
	std::vector<int> layer_neuron_num;
	// layer[i] is a column vector of activations (layer_neuron_num[i] x 1, CV_32FC1).
	std::vector<cv::Mat> layer;
	// weights[i] maps layer[i] to layer[i+1]: (layer[i+1].rows x layer[i].rows).
	std::vector<cv::Mat> weights;
	// bias[i] is a column vector added to layer[i+1]; defaults to all zeros.
	std::vector<cv::Mat> bias;
public:
	Net() {};
	~Net() {};

	// Initialize the net: generate layer, weight and bias matrices from the
	// per-layer neuron counts. Bias defaults to all zeros.
	void initNet(std::vector<int> layer_neuron_num_);

	// Initialize the weight matrices: type == 0 -> Gaussian(a, b), else uniform(a, b).
	void initWeights(int type = 0, double a = 0., double b = 0.1);

	// Initialize every bias matrix to the given scalar value.
	void initBias(cv::Scalar& bias);

	// Forward pass: linear transform + activation for each layer.
	// FIX(review): was misspelled "farward"; the accompanying text and later
	// chapters use forward(), and no definition or caller of farward is present.
	void forward();

	// Backward pass: backpropagation; calls updateWeights().
	void backward();

protected:
	// Initialize a single weight matrix: type == 0 -> Gaussian, else uniform.
	void initWeight(cv::Mat &dst, int type, double a, double b);

	// Apply the activation function selected by func_type to x.
	cv::Mat activationFunction(cv::Mat &x, std::string func_type);

	// Compute the delta error used by backpropagation.
	void deltaError();

	// Update the weight matrices from the computed deltas.
	void updateWeights();
};

}

Net类的成员函数除了默认的构造函数和析构函数，还有：

initNet()：用来初始化神经网络

initWeights()：初始化权值矩阵，调用initWeight()函数

initBias()：初始化偏置项

forward()：执行前向运算，包括线性运算和非线性激活，同时计算误差

backward()：执行反向传播，调用updateWeights()函数更新权值。

	//Initialize net
void Net::initNet(std::vector<int> layer_neuron_num_)
{
layer_neuron_num = layer_neuron_num_;
//Generate every layer.
layer.resize(layer_neuron_num.size());
for (int i = 0; i < layer.size(); i++)
{
layer[i].create(layer_neuron_num[i], 1, CV_32FC1);
}
std::cout << "Generate layers, successfully!" << std::endl;

//Generate every weights matrix and bias
weights.resize(layer.size() - 1);
bias.resize(layer.size() - 1);
for (int i = 0; i < (layer.size() - 1); ++i)
{
weights[i].create(layer[i + 1].rows, layer[i].rows, CV_32FC1);
//bias[i].create(layer[i + 1].rows, 1, CV_32FC1);
bias[i] = cv::Mat::zeros(layer[i + 1].rows, 1, CV_32FC1);
}
std::cout << "Generate weights matrices and bias, successfully!" << std::endl;
std::cout << "Initialise Net, done!" << std::endl;
}

	//Initialise one weight matrix in place.
//type == 0 selects a Gaussian distribution (mean a, stddev b);
//any other type selects a uniform distribution on [a, b).
void Net::initWeight(cv::Mat &dst, int type, double a, double b)
{
	const bool gaussian = (type == 0);
	if (gaussian)
		randn(dst, a, b);
	else
		randu(dst, a, b);
}

//initialise the weights matrix.
void Net::initWeights(int type, double a, double b)
{
//Initialise weights cv::Matrices and bias
for (int i = 0; i < weights.size(); ++i)
{
initWeight(weights[i], 0, 0., 0.1);
}
}

	//Initialise the bias matrices: fill every bias vector with the given scalar.
//(Assigning a cv::Scalar to a cv::Mat sets all of its elements.)
void Net::initBias(cv::Scalar& bias_)
{
	for (cv::Mat &b : bias)
	{
		b = bias_;
	}
}

#include"../include/Net.h"
//<opencv2\opencv.hpp>

using namespace std;
using namespace cv;
using namespace liu;

int main(int argc, char *argv[])
{
//Set neuron number of every layer
vector<int> layer_neuron_num = { 784,100,10 };

// Initialise Net and weights
Net net;
net.initNet(layer_neuron_num);
net.initWeights(0, 0., 0.01);
net.initBias(Scalar(0.05));

getchar();
return 0;

}