AI学习记录 - 全连接层js代码实现,可在react,vue,angular中调用

持续更新中…

全连接层的js实现,可以在浏览器上面跑的全连接层代码(绝对全网独一份,因为算法比较烂,没有进行优化哈哈哈哈)

全连接层大体代码逻辑:

1、通过配置 neuronNetworkConfig ,可以配置出神经网络的结构。
2、每个节点都是一个线性公式y=kx+b,线性公式之间加上激活函数。
3、计算预测值和真实值之间使用的是均方误差函数。
4、neuronNetworkNeuronInstance 是配置neuronNetworkConfig 之后,生成的神经网络实例对象,我们可以在这里面看到每个神经元的权重。
5、learn_rate 是学习率。
6、alltrainsTotal 是总的循环训练次数,假设有20条训练数据,20 * 200 = 4000次,训练完一次20条数据就计算一次损失率,也就是计算预测值和真实值之间的误差。
(此处原文有插图:训练过程截图)

数学逻辑(使用矩阵乘法会更加简单,但是我没有使用,因为一开始我并不知道…)

下个月会有ppt讲解出来,对于全连接层有一个详细的解释,下面先说需要使用到的数学概念
1、权重调整使用的是梯度下降法。
2、计算误差值使用的是均方误差函数。
3、使用导数公式求解某个权重对于结果的影响方向和程度,从而决定权重是增大还是缩小,以及它的幅度(代码实现非常复杂,涉及到复合函数求导公式和链式求导法则)。
4、激活函数是sigmoid或者是tanh ,这是一个分类器,因为下一章节我使用react实现了一个简单的0-9图像识别(实际只能识别3个数字而已,因为如果创建太多的节点,在浏览器上面运行会巨慢,加上我的计算方式比较庞大,所以全连接层网络不可以太复杂太深)


let neuronNetworkConfig = [];  // network layout: neuron count per layer, e.g. [2, 3, 1]
let neuronNetworkNeuronInstance = [];  // the live network instance: an array of layers, each an array of neuron objects
let learn_rate = 0.001;  // learning rate used by gradient descent
let alltrainsTotal = 200  // total number of full passes over the training set
let trainNumTotal = new Array(alltrainsTotal).fill('');  // dummy array whose only purpose is to drive the training loop
let trainNum = 0;  // index of the training pass currently running

// Store the network layout (an array of per-layer neuron counts).
function setNeuronNetworkConfig(layerSizes) {
  neuronNetworkConfig = layerSizes;
}
// Replace the live network instance (e.g. to restore a previously trained model).
function setNeuronNetworkNeuronInstance(instance) {
  neuronNetworkNeuronInstance = instance;
}

// ====================================================================

// Build a single neuron: `numW` incoming weights (one per neuron in the
// previous layer), a bias `b`, and `x`, the neuron's current output value.
// Weights and bias are drawn from a standard normal distribution.
function createNeuron(numW = 0) {
  const w = Array.from({ length: numW }, () => generateNormalRandom(0, 1));
  return {
    w,
    b: generateNormalRandom(0, 1),
    // current output/activation of this neuron
    x: 0,
  };
}

// Build the network described by `neuronNetworkConfig`: a fixed-size input
// layer, a fixed-size output layer, and any number of hidden layers between.
// Each config entry is the neuron count of that layer. Populates (and
// returns) the module-level `neuronNetworkNeuronInstance`.
// Fixes: used `.map` purely for side effects; removed commented-out dead code.
function createNetwork() {
  neuronNetworkConfig.forEach((singleLayerNeuronNumber, index) => {
    const singleLayer = [];
    for (let i = 0; i < singleLayerNeuronNumber; i++) {
      if (index - 1 < 0) {
        // Input layer: neurons receive raw values, so they have no weights.
        singleLayer.push(createNeuron());
      } else {
        // Hidden/output layer: one weight per neuron in the previous layer.
        singleLayer.push(createNeuron(neuronNetworkConfig[index - 1]));
      }
    }
    neuronNetworkNeuronInstance.push(singleLayer);
  });
  // Tag every neuron with its coordinates and role in the network;
  // backpropagation later relies on `layerNum`, `inLayerNum` and `type`.
  for (let i = 0; i < neuronNetworkNeuronInstance.length; i++) {
    const currentLayer = neuronNetworkNeuronInstance[i];
    for (let j = 0; j < currentLayer.length; j++) {
      const singleNeuron = currentLayer[j];
      singleNeuron.layerNum = i;    // index of the layer this neuron lives in
      singleNeuron.inLayerNum = j;  // index of the neuron inside its layer
      if (i === 0) {
        singleNeuron.type = "start";
      } else if (i === neuronNetworkNeuronInstance.length - 1) {
        singleNeuron.type = "end";
      } else {
        singleNeuron.type = "hidden";
      }
    }
  }
  return neuronNetworkNeuronInstance;
}

// Logistic activation used by the forward pass: maps any real x into (0, 1).
function sigmoid(x) {
  const negExp = Math.exp(-x);
  return 1 / (1 + negExp);
}

// Derivative of the sigmoid, used during backpropagation:
// sigmoid'(x) = sigmoid(x) * (1 - sigmoid(x)).
function deriv_sigmoid(x) {
  const activated = sigmoid(x);
  return activated * (1 - activated);
}
// tanh activation: maps any real x into (-1, 1).
const tanh = (x) => Math.tanh(x);
// Derivative of tanh, used during backpropagation: tanh'(x) = 1 - tanh(x)^2.
function tanhDerivative(x) {
  const t = tanh(x);
  return 1 - t * t;
}

// Mean squared error over a batch; the lower the loss, the better the fit.
// y_true: array of expected output vectors (one per sample).
// y_pred: array of predicted output vectors, same shape as y_true.
// For each sample the squared differences are averaged over its outputs,
// then the per-sample averages are averaged over the batch.
// Fixes: `.map` was used purely for side effects; "avarage" typo; an empty
// batch now returns 0 instead of NaN (0 / 0).
function mse_loss(y_true, y_pred) {
  if (y_true.length === 0) return 0;
  const perSampleAverages = y_true.map((trueVector, sampleIndex) => {
    const squaredErrorSum = trueVector.reduce(
      (acc, value, outputIndex) =>
        acc + Math.pow(value - y_pred[sampleIndex][outputIndex], 2),
      0
    );
    return squaredErrorSum / trueVector.length;
  });
  const total = perSampleAverages.reduce((acc, avg) => acc + avg, 0);
  return total / perSampleAverages.length;
}

// Draw one sample from the normal distribution N(mean, stddev^2) using the
// Box-Muller transform. Used to initialise weights and biases.
function generateNormalRandom(mean, stddev) {
  let u1 = 0;
  let u2 = 0;
  // Math.random() can return exactly 0; redraw so Math.log(u1) stays finite.
  while (u1 === 0) u1 = Math.random();
  while (u2 === 0) u2 = Math.random();
  const standardNormal =
    Math.sqrt(-2.0 * Math.log(u1)) * Math.cos(2.0 * Math.PI * u2);
  return mean + stddev * standardNormal;
}


// 在正向传播过程中,计算每个神经元的输入值,即w1*x1 + w2*x2 + w3*x3 + b = 每个神经元输入值 
function getWeightTotal(previousLayer, currentNeuron) {

  let sum = 0;
  for (let i = 0; i < previousLayer.length; i++) {
    let lastNeuron = previousLayer[i];
    sum = sum + lastNeuron.x * currentNeuron.w[i];
  }
  return tanh(sum + currentNeuron.b);
}

// Forward pass over a batch of samples.
// allInputDataList: array of input vectors, one per sample.
// Returns one predicted output vector (the final layer's activations) per sample.
// NOTE: mutates each neuron's `x` in `neuronNetworkNeuronInstance` as it runs.
function feedforward(allInputDataList) {
  const predEndLayerDataList = [];
  const lastLayerIndex = neuronNetworkNeuronInstance.length - 1;
  for (const singleData of allInputDataList) {
    // Seed the input layer with the raw sample values.
    singleData.forEach((value, i) => {
      neuronNetworkNeuronInstance[0][i].x = value;
    });
    const predSingleEndLayerData = [];
    // Propagate layer by layer: every neuron activates on the previous
    // layer's outputs; only the final layer's outputs are collected.
    for (let layer = 1; layer < neuronNetworkNeuronInstance.length; layer++) {
      for (const neuron of neuronNetworkNeuronInstance[layer]) {
        const activation = getWeightTotal(neuronNetworkNeuronInstance[layer - 1], neuron);
        neuron.x = activation;
        if (layer === lastLayerIndex) {
          predSingleEndLayerData.push(activation);
        }
      }
    }
    predEndLayerDataList.push(predSingleEndLayerData);
  }
  return predEndLayerDataList;
}

// Entry point for training: runs `alltrainsTotal` full passes over the data.
// `neuron` below always refers to a single neuron of the network.
// allInputDataList: all training samples (array of input vectors).
// all_y_trues: the expected (target) output vector for every sample.
// callbackLoss: invoked with the current loss every 10 passes.
// callBackFinish: invoked with a deep copy of the trained network when done.
// NOTE(review): each pass is scheduled with setTimeout(…, 0), so all passes
// are queued immediately and run back-to-back on the event loop — presumably
// to avoid blocking the browser UI between passes; verify against callers.
function train(allInputDataList, all_y_trues, callbackLoss, callBackFinish) {
  console.log("all_y_trues", all_y_trues)
  trainNumTotal.map((i, index) => {

    setTimeout(() => {
      trainNum = index;
      console.log("trainNum", trainNum)
      // Feed every sample through the network so each neuron's `x` is set.
      for (let z = 0; z < allInputDataList.length; z++) {
        let singleData = allInputDataList[z];

        // Seed the input layer with the raw sample values.
        for (let i = 0; i < singleData.length; i++) {
          neuronNetworkNeuronInstance[0][i].x = singleData[i];
        }
        // Propagate through the remaining layers.
        for (let i = 1; i < neuronNetworkNeuronInstance.length; i++) {
          let singleLayer = neuronNetworkNeuronInstance[i];

          for (let j = 0; j < singleLayer.length; j++) {
            let neuron = singleLayer[j];
            // Weighted sum of the previous layer's outputs, then tanh.
            neuron.x = getWeightTotal(neuronNetworkNeuronInstance[i - 1], neuron);
          }
        }
        // The code above is one forward pass; the call below backpropagates
        // for this single sample. (Function declaration is hoisted, so the
        // forward-reference is safe.)
        trainSingleData(all_y_trues[z]);
      }

      // Backpropagation for one sample: computes per-neuron partial
      // derivatives, enumerates every path to the output layer, and applies
      // the averaged gradient to every weight and bias.
      function trainSingleData(single_y_trues) {
        // Scratch arrays, rebuilt for every neuron in the loops below.
        let single_d_ypred_d_w = [];
        let single_d_ypred_d_h = [];

        // Walk the layers from the output layer back to the input layer.
        for (let i = neuronNetworkNeuronInstance.length - 1; i >= 0; i--) {

          let currentLayer = neuronNetworkNeuronInstance[i];

          // Visit every neuron of the current layer.
          for (let j = 0; j < currentLayer.length; j++) {
            let currentNeuron = currentLayer[j];
            // Derivative of this neuron's output w.r.t. each of its weights.
            single_d_ypred_d_w = []
            // Intermediate derivatives needed for the chain rule.
            single_d_ypred_d_h = []
            // The input layer has no weights, hence the i > 0 guard.
            if (i > 0) {
              // previousSingleLayer is the layer feeding into this one.
              let previousSingleLayer = neuronNetworkNeuronInstance[i - 1];
              for (let q = 0; q < previousSingleLayer.length; q++) {
                let neuronInPreviousLayer = previousSingleLayer[q]
                single_d_ypred_d_w.push(neuronInPreviousLayer.x * tanhDerivative(currentNeuron.x)); // d_ypred_d_w
              }
              currentNeuron.single_d_ypred_d_w = single_d_ypred_d_w
            }
            // Derivative of this neuron's output w.r.t. each input `h`.
            for (let z = 0; z < currentNeuron.w.length; z++) {
              let current_w = currentNeuron.w[z];
              single_d_ypred_d_h.push(current_w * tanhDerivative(currentNeuron.x)); // d_ypred_d_h
            }
            currentNeuron.single_d_ypred_d_h = single_d_ypred_d_h
            currentNeuron.layer_d_ypred_d_b = tanhDerivative(currentNeuron.x)
          }
        }

        // dL/dypred of the MSE loss for every output neuron: -2 * (true - pred).
        for (let i = 0; i < single_y_trues.length; i++) {
          let currentNeuron = neuronNetworkNeuronInstance[neuronNetworkNeuronInstance.length - 1][i];
          let d_L_d_ypred = -2 * (single_y_trues[i] - currentNeuron.x);
          currentNeuron.d_L_d_ypred = d_L_d_ypred;
        }

        // Cartesian-product helper (originally GPT-generated): given a 2-D
        // array of arbitrary row lengths, pick one element per row and emit
        // every combination, e.g. [[1,2,3],[4,5,6],[7,8,9]] -> all triples.
        function generateCombinations(treeOneTopArrays) {
          const result = [];
          function generate(index, currentCombination) {
            if (index === treeOneTopArrays.length) {
              result.push([...currentCombination]);
              return;
            }
            const currentLayer = treeOneTopArrays[index];
            for (let i = 0; i < currentLayer.length; i++) {
              currentCombination.push(currentLayer[i]);
              generate(index + 1, currentCombination);
              currentCombination.pop();
            }
          }
          generate(0, []);
          return result;
        }

        // Treat the neuron at the given coordinates as the root of a tree and
        // enumerate every path from it through the later layers to the output.
        function createAllLinkInOneNode(currentLayerNum, currentNeuronNum, neuronNetworkNeuronInstance) {
          let firstLayer = [neuronNetworkNeuronInstance[currentLayerNum][currentNeuronNum]]
          let otherLayer = []
          for (let i = currentLayerNum + 1; i < neuronNetworkNeuronInstance.length; i++) {
            otherLayer.push(neuronNetworkNeuronInstance[i])
          }
          let arr = [firstLayer, ...otherLayer];
          let arr2 = generateCombinations(arr);
          // Result: all paths rooted at that neuron,
          // [[{}, {}, {}, {}],    [{}, {}, {}, {}],     [{}, {}, {}, {}] ......... ]
          // where each {} is a neuron object, e.g.:
          /*
            0: {w: Array(90), b: -0.7051689004595468, x: 0.9999455939930807, layerNum: 1, inLayerNum: 8, …}
            1: {w: Array(10), b: -0.5900213335572786, x: 0.30126811684417115, layerNum: 2, inLayerNum: 0, …}
            2: {w: Array(10), b: 0.731839375917181, x: 0.051223537096143065, layerNum: 3, inLayerNum: 0, …}
          */
          return arr2;
        }
        // For every neuron (starting at the first layer), build the path set
        // rooted at it — the chain rule multiplies derivatives along a path.
        for (let i = 0; i < neuronNetworkNeuronInstance.length; i++) {
          for (let j = 0; j < neuronNetworkNeuronInstance[i].length; j++) {
            let data = createAllLinkInOneNode(i, j, neuronNetworkNeuronInstance);
            calculate(data);
          }
        }

        // Computes the gradient of every weight (and the bias) of one neuron
        // from all its paths and overwrites the old values in place.
        function calculate(allLinkNodePath) {
          // Iterate over every path rooted at this neuron.

          // different_sum_in_different_w is 2-D: first index = weight number,
          // second = the per-path derivatives collected for that weight.
          let different_sum_in_different_w = [];
          let different_sum_in_different_b = [];
          let firstNeuron = null;
          for (let i = 0; i < allLinkNodePath[0][0].w.length; i++) {
            different_sum_in_different_w.push([]);
          }
          for (let i = 0; i < allLinkNodePath.length; i++) {

            let singleLinkPathNode = allLinkNodePath[i];
            let result_w = 1;
            let result_b = 1;
            firstNeuron = singleLinkPathNode[0];
            // Input-layer neurons have an empty `w`, so this loop is skipped
            // for them. Otherwise: solve for each weight of the root neuron.
            for (let j = 0; j < firstNeuron.w.length; j++) {
              // Take the partial derivative w.r.t. this particular weight;
              // reset the running products for every weight.
              result_w = 1;
              result_b = 1;
              // Only the root neuron of the path uses d_ypred_d_w.
              let current_dw = firstNeuron.single_d_ypred_d_w[j];
              let current_db = firstNeuron.layer_d_ypred_d_b;
              result_w = result_w * current_dw;
              result_b = result_b * current_db;
              // Chain rule along the path; singleLinkPathNode is
              // [root neuron, next-layer neuron, ..., output neuron].
              // This inner loop yields ONE path's gradient for ONE weight.
              for (let z = 0; z < singleLinkPathNode.length; z++) {
                if (z >= 1) {
                  // Pick the derivative slot matching the previous node's
                  // position, since this neuron connects to every neuron of
                  // the previous layer.
                  let index = singleLinkPathNode[z - 1].inLayerNum;
                  let d_ypred_d_h = singleLinkPathNode[z].single_d_ypred_d_h[index];
                  result_w = result_w * d_ypred_d_h;
                  result_b = result_b * d_ypred_d_h;
                }
                if (singleLinkPathNode[z].type === "end") {
                  // Reached the output neuron: finish this path's gradient
                  // (loss derivative times learning rate folded in here).
                  result_w = result_w * singleLinkPathNode[z].d_L_d_ypred * learn_rate;

                  result_b = result_b * singleLinkPathNode[z].d_L_d_ypred * learn_rate;
                  different_sum_in_different_w[j].push(result_w);

                  // Push b only once (j === 0): the neuron has many weights
                  // but a single bias, so one accumulation per path suffices.
                  if (j === 0) {
                    different_sum_in_different_b.push(result_b);
                  }
                }
              }

            }
          }
          // All paths processed: write the averaged updates back to the neuron.
          for (let j = 0; j < firstNeuron.w.length; j++) {

            let sum_w = 0;
            // Average this weight's per-path derivatives (sum / path count).
            for (let i = 0; i < different_sum_in_different_w[j].length; i++) {
              sum_w = sum_w + different_sum_in_different_w[j][i];
            }
            sum_w = sum_w / different_sum_in_different_w[j].length;
            // Gradient-descent step for this weight.
            neuronNetworkNeuronInstance[firstNeuron.layerNum][firstNeuron.inLayerNum].w[j] -= sum_w;

          }

          let sum_b = 0;
          for (let i = 0; i < different_sum_in_different_b.length; i++) {
            sum_b = sum_b + different_sum_in_different_b[i];
          }
          // Gradient-descent step for the bias.
          neuronNetworkNeuronInstance[firstNeuron.layerNum][firstNeuron.inLayerNum].b -= sum_b / different_sum_in_different_b.length;

        }

      }

      // Every 10 passes, report the current loss to the caller.
      if (trainNum % 10 === 0) {
        // allInputDataList holds multiple input samples.
        let y_preds = feedforward(allInputDataList);
        // y_preds: predicted outputs; all_y_trues: the target outputs.
        console.log("all_y_trues", all_y_trues);
        let loss = mse_loss(all_y_trues, y_preds);
        if (callbackLoss) { callbackLoss(loss); }
      }

      // After the final pass, hand a deep copy of the network to the caller.
      if (callBackFinish && trainNum === alltrainsTotal - 1) {
        console.log("训练完成", neuronNetworkNeuronInstance);
        callBackFinish(JSON.parse(JSON.stringify(neuronNetworkNeuronInstance)));
      }

    }, 0)
  })



}

// Example usage (left commented out):
// train(data, all_y_trues);
// Debug output emitted once when the module is loaded.
console.log("end ---- neuronNetworkNeuronInstance", neuronNetworkNeuronInstance);

// Run a single input vector through the network and return the final
// layer's activations — the model's prediction for that sample.
// NOTE: mutates each neuron's `x` in `neuronNetworkNeuronInstance`.
function getDataFromModel(singleData) {
  const result = [];
  const lastLayerIndex = neuronNetworkNeuronInstance.length - 1;
  // Seed the input layer with the raw sample values.
  singleData.forEach((value, i) => {
    neuronNetworkNeuronInstance[0][i].x = value;
  });
  // Propagate through the remaining layers, collecting the last layer's outputs.
  for (let layer = 1; layer < neuronNetworkNeuronInstance.length; layer++) {
    for (const neuron of neuronNetworkNeuronInstance[layer]) {
      neuron.x = getWeightTotal(neuronNetworkNeuronInstance[layer - 1], neuron);
      if (layer === lastLayerIndex) {
        result.push(neuron.x);
      }
    }
  }
  return result;
}

// Accessor for the live network instance (the array of layers of neurons).
const getNeuronNetworkNeuronInstance = () => neuronNetworkNeuronInstance;

// Example: console.log("result", getDataFromModel([10, 10]))

// Public API — intended call order: configure, create, train, then query.
export { setNeuronNetworkConfig, createNetwork, train, getDataFromModel, setNeuronNetworkNeuronInstance, getNeuronNetworkNeuronInstance } 
  • 2
    点赞
  • 5
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
Vue, React, and Angular are all popular JavaScript frameworks used for building web applications. Vue is a relatively lightweight framework that is known for its simplicity and ease of use. It is often favored by developers who are new to JavaScript frameworks or who prefer a more minimalist approach to development. Vue also has a strong focus on performance and can be used for building both small and large-scale applications. React is developed and maintained by Facebook and is widely used for building complex user interfaces. It uses a virtual DOM and a one-way data flow model which enables it to efficiently manage state and handle large amounts of data. React can be used with other libraries and frameworks, making it highly flexible and customizable. Angular is a full-featured framework developed by Google. It is known for its powerful features and its ability to handle large and complex applications. Angular uses a two-way data binding approach and has a steep learning curve compared to Vue and React. However, it offers a wide range of features, including dependency injection, routing, and animations, making it a popular choice for enterprise-level applications. Ultimately, the choice between Vue, React, and Angular will depend on the specific needs and preferences of the developer or development team. Each framework has its own strengths and weaknesses, and the decision should be based on the project requirements, available resources, and the experience level of the developers involved.
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值