一个多层感知机(MLP)的C++简单实现

看了几篇文章后,自己尝试写了一个。我测试下来是收敛的。


代码写得很烂,编程水平不高。


---之后我会更新下---


//
#include <iostream>
#include <vector>
#include <cmath>    // exp
#include <cstddef>  // size_t
#include <cstdlib>  // rand, RAND_MAX
#include "math.h"   // legacy C header kept for compatibility; <cmath> preferred
using namespace std;

// Returns a pseudo-random float uniformly distributed in [0, 1].
// Uses the C library rand(); the sequence is deterministic unless srand() is called.
float rand_(){
    return static_cast<float>(rand()) / static_cast<float>(RAND_MAX);
}
class Neuron;
// A directed link between two neurons.
//
// Each logical connection is stored twice: once in the source neuron's
// list and once in the target's. The `vector` flag distinguishes the two
// entries: false marks the entry that points at an upstream neuron feeding
// us (summed in Neuron::Active), true marks the mirror entry.
// NOTE: the member name `vector` regrettably shadows std::vector inside
// this class; it is kept because callers access it by name.
class Connect{
public:
    float w = rand_();    // connection weight, randomly initialised per object
    Neuron *target;       // neuron at the other end of the link
    bool vector = false;  // link-direction flag (false = incoming, true = mirror)

    Connect(Neuron *output) : target(output) {}
    ~Connect() = default;
};

// Activation function.
//
// NOTE(review): despite the name this is neither tanh nor the usual
// sigmoid: it computes 1/(1+e^x) = sigmoid(-x), a *decreasing* squashing
// function onto (0,1). Its derivative is -y*(1-y), which is why
// Neuron::ComputeError can use the `w - rate*error*x` update rule: the two
// sign flips cancel. Do not "fix" either piece in isolation.
float tan_h(float val){
    const double denom = 1.0 + exp(static_cast<double>(val));
    return static_cast<float>(1.0 / denom);
}

//Definition of a neuron.
//
//A Neuron stores its activation in `x`. Input-layer neurons get `x` written
//directly by NeuralNetwork::Training/Predict; output-layer neurons have the
//*target* value written into `x` before the backward pass, and ComputeError
//reads `x` as the training target.

class Neuron{
public:
    int layer_index =  -1; //index of the layer this neuron belongs to
    int neuron_index = -1  ;//position of this neuron within its layer
    //the pair (layer_index, neuron_index) uniquely identifies a neuron
    float learning_rate = 6;
    //learning rate (unusually large; the author reports convergence anyway)
    typedef float (*TamplteFunc)(float sum);
    float x=rand_();
    //the neuron's activation (doubles as the target value on the output layer)
    bool sigal = false;
    //NOTE(review): `sigal` appears unused in this file -- TODO confirm
    vector<Connect*> connections;
    //all links touching this neuron: incoming entries (vector==false) plus
    //mirror entries for neurons this one feeds (vector==true)
    TamplteFunc activefunc;
    Neuron(int index_,int neuron_index_,TamplteFunc pactivefunc=&tan_h){
        //Constructor: the two indices identify the neuron; the activation
        //function defaults to tan_h (which is actually sigmoid(-x), see tan_h).
        neuron_index = neuron_index_;
        layer_index = index_;
        //  cout<<"("<<index_<<","<<neuron_index_<<") ";
        x=rand_();
        activefunc = pactivefunc; };
    //Create a bidirectional link with `target`: one Connect is stored here
    //with direction flag I_or_O, and a mirror Connect (flag inverted) is
    //stored on the target. Each neuron's destructor frees only its own list.
    void ConnectOthers(Neuron *target,bool I_or_O){
        Connect *connection  = new Connect(target);
        connection->vector  = I_or_O;
        //this side of the link
        Connect *connection_for_target  = new Connect(this);
        connection_for_target->vector  = !I_or_O;
        //the mirror entry, seen as reversed by the other neuron
        connections.push_back(connection);
        target->connections.push_back(connection_for_target);
    }

    //Weighted sum of the incoming (vector==false) neighbours' activations
    //plus bias `b`, squashed through activefunc. When isRevise is true the
    //result is stored back into x; ComputeError passes false so the stored
    //target in x is not overwritten.
    float Active(float b,bool isRevise = true){
        float sum = 0 ;
        for(int i=0;  (i<connections.size()) ;i++)
        {
            if( connections[i]->vector ==false )
           sum+=  (connections[i]->w)*(connections[i]->target->x);
        }
        sum+=  b ;
        //add up x * w over every incoming connection of this neuron
        float val  = activefunc(sum);
        if(val>1)
        {
            //sanity check: the squashing function must stay within (0,1)
            abort();

        }
        if(isRevise == true)
        x = val;
        return val;
    }

    //One local gradient step: `x` is treated as the desired output, the
    //fresh activation y as the prediction.
    //NOTE(review): only output neurons hold a real target in x; hidden
    //neurons compare against their own previous activation, and no error is
    //propagated between layers -- looks dubious, TODO confirm intent.
    //NOTE(review): `w - rate*error*x` would be gradient *ascent* for a
    //standard sigmoid, but tan_h is sigmoid(-x) with derivative -y*(1-y),
    //so the two sign flips cancel; do not change either piece alone.
    //NOTE(review): b is decremented once per incoming connection, and
    //Layer::backward passes b by value, so the update is discarded -- TODO.
    float ComputeError(float &b){
        float y =  Active(b,false);
     //   cout<<this->neuron_index<<" y:"<<y;
        // debug
         float error =(x - y)*(y*(1-y));
        //error term: (target - prediction) * derivative factor
        for(int i = 0 ; i<connections.size();i++)
        {
            if( connections[i]->vector ==false ) {

                connections[i]->w = connections[i]->w -learning_rate * error *connections[i]->target->x;
                b=b-learning_rate * error;
            }

        }
        return error;
    }
    ~Neuron(){
        //free only the Connect objects stored in this neuron's own list;
        //mirror entries are freed by the neurons that hold them
        for(int i=0;  i<connections.size()  ;i++)
        {
            delete(connections[i]);
        }
    };
};

//A layer of neurons sharing a single bias value `b`.
//
//BUG FIX: the original had a stray `}` between backward() and the
//destructor that closed the class body early, making the file uncompilable.
class Layer{
public:
    int Layer_type = 1 ;
    int index= -1;
    // Layer_type convention: 0 = input, 1 = hidden, 2 = output
    vector<Neuron*> neurons;
    float b=rand_();  // bias added to each downstream neuron's weighted sum

    // Build `Num` neurons, each tagged with this layer's index and its own
    // position within the layer.
    Layer(int Num,int index_) {
        b=rand_();
        index=  index_;
        for (int i = 0; i < Num; i++) {
            neurons.push_back(new Neuron(index_, i));
        }
        cout<<endl;  // leftover debug newline, kept to preserve output behaviour
    }

    // Connect every neuron of this layer to every neuron of `layer`
    // (full connection; `method` is reserved for other link strategies).
    void ConnectLayers(Layer *layer, const int method = 0){
        for(size_t i = 0; i < this->neurons.size(); i++){
            for(size_t j = 0; j < layer->neurons.size(); j++){
                this->neurons[i]->ConnectOthers(layer->neurons[j], false);
            }
        }
    }

    // Forward pass: recompute every neuron's activation using the previous
    // layer's bias.
    void forward(float b_last_layer){
        for(size_t i = 0; i < neurons.size(); i++)
        {
            neurons[i]->Active(b_last_layer);
        }
    }

    // Backward pass: one local gradient step per neuron, last to first.
    // NOTE(review): b_last_layer is taken by value, so the bias updates
    // ComputeError writes into it are discarded -- TODO confirm intent.
    void backward(float b_last_layer)
    {
        for(int i = neurons.size()-1; i >= 0; i--)
        {
            neurons[i]->ComputeError(b_last_layer);
        }
    }

    ~Layer(){
        for (size_t i = 0; i < neurons.size(); i++) {
            delete (neurons[i]);
        }
    }
};
//A simple multi-layer perceptron: a stack of fully connected Layers.
class NeuralNetwork{
public:
    vector<Layer*> Layers;
    vector<int> structures_;

    // structures[i] = number of neurons in layer i; consecutive layers are
    // fully connected. `error` and `step_d_c` are currently unused and kept
    // only for interface compatibility.
    NeuralNetwork(
            const vector<int> structures,
            float error = 0.02,
            float step_d_c = 1
    ){
        structures_ = structures;
        for(size_t i = 0; i < structures.size(); i++)
        {
            Layers.push_back(new Layer(structures[i], int(i)));
            if(i>0)
            {
                //fully connect this layer to the previous one (the default)
                Layers[i]->ConnectLayers(Layers[i-1]);
            }
        }
    }

    // Run the forward pass over the hidden/output layers. During training
    // the output layer is skipped, because its neurons' x fields hold the
    // training targets and must not be overwritten before the backward pass.
    void forwardTrans(bool training = true){
        // int arithmetic avoids size_t underflow on an empty network
        for(int i = 1; i < int(Layers.size()) - int(training); i++)
        {
            Layers[i]->forward(Layers[i-1]->b);
        }
    }

    // Run one local gradient step on every non-input layer, from the output
    // layer backwards.
    void backwardTrans(){
        for(int i = Layers.size()-1; i >= 1; i--)
        {
            Layers[i]->backward(Layers[i-1]->b);
        }
    }

    ~NeuralNetwork(){
        // BUG FIX: the loop condition used to be `Layers.size()` (always
        // true once non-empty), which ran past the end of the vector.
        for(size_t i = 0; i < Layers.size(); i++)
        {
            delete(Layers[i]);
        }
    }

    // One training step on a single sample: load inputs into layer 0, load
    // targets into the output layer's x fields, then forward + backward.
    // Returns false if either size does not match the network structure.
    bool  Training(const vector<float> &x, const vector<float> &target){

        if(x.size() != Layers[0]->neurons.size()) return false;
        for(size_t i = 0; i < x.size(); i++){
            Layers[0]->neurons[i]->x = x[i];
        }
        int layer_index_output = Layers.size() - 1;
        if(target.size() != Layers[layer_index_output]->neurons.size()) return false;
        for(size_t i = 0; i < target.size(); i++){
            Layers[layer_index_output]->neurons[i]->x = target[i];
        }
        forwardTrans();
        backwardTrans();
        return true;
    }

    // Run the network on `x` and copy the output layer's activations into
    // `target`. Returns false on a size mismatch.
    bool Predict(const vector<float> &x, vector<float> &target){
        if(x.size() != Layers[0]->neurons.size()) return false;
        for(size_t i = 0; i < x.size(); i++){
            Layers[0]->neurons[i]->x = x[i];
        }
        int layer_index_output = Layers.size() - 1;
        forwardTrans(false);
        if(target.size() != Layers[layer_index_output]->neurons.size()) return false;
        for(size_t i = 0; i < target.size(); i++){
            target[i] = Layers[layer_index_output]->neurons[i]->x;
        }
        // BUG FIX: the original fell off the end of this non-void function
        // (undefined behaviour); report success explicitly.
        return true;
    }
};

评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值