【Deep-ML Series】Single Neuron with Backpropagation (a hand-rolled single neuron trained with backpropagation and gradient descent)
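The task (from the Deep-ML problem "Single Neuron with Backpropagation"): train a single sigmoid neuron under mean-squared-error loss with batch gradient descent, and return the updated weights, the updated bias, and the MSE recorded at each epoch. The update rule falls straight out of the chain rule. Writing $p_i = \sigma(w \cdot x_i + b)$ for the prediction on sample $i$ and $L = \frac{1}{n}\sum_{i=1}^{n}(p_i - y_i)^2$ for the loss,

$$\frac{\partial L}{\partial w_j} = \frac{2}{n}\sum_{i=1}^{n}(p_i - y_i)\,p_i(1-p_i)\,x_{ij}, \qquad \frac{\partial L}{\partial b} = \frac{2}{n}\sum_{i=1}^{n}(p_i - y_i)\,p_i(1-p_i),$$

using the identity $\sigma'(z) = \sigma(z)(1-\sigma(z))$. The code below implements exactly this batch update.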

import numpy as np

def sigmoid(x):
    return 1 / (1 + np.exp(-x))

def train_neuron(features: np.ndarray, labels: np.ndarray, initial_weights: np.ndarray, initial_bias: float, learning_rate: float, epochs: int) -> tuple[np.ndarray, float, list[float]]:
    features = np.array(features)
    labels = np.array(labels)
    weights = np.array(initial_weights, dtype=float)  # float dtype so in-place updates work even for integer inputs
    bias = initial_bias
    mse_values = []
    for epoch in range(epochs):
        # Forward pass: compute the neuron's predictions
        prediction = sigmoid(features.dot(weights) + bias)
        # Record this epoch's MSE (rounded to 4 decimal places)
        mse = np.mean((prediction - labels) ** 2)
        mse_values.append(round(mse, 4))
        # Backward pass: chain rule through the MSE and the sigmoid
        error = prediction - labels
        # d(p - y)^2 / dp = 2 * (p - y); sigmoid'(z) = p * (1 - p)
        delta = 2 * error * prediction * (1 - prediction)
        weight_gradient = np.dot(features.T, delta) / len(labels)
        bias_gradient = np.sum(delta) / len(labels)
        # Update weights and bias (batch gradient descent)
        weights -= learning_rate * weight_gradient
        bias -= learning_rate * bias_gradient

    # Round weights and bias once, after training (rounding inside the loop
    # would also leave updated_weights undefined when epochs == 0)
    updated_weights = np.round(weights, 4)
    updated_bias = round(bias, 4)
    return updated_weights, updated_bias, mse_values

if __name__ == '__main__':
    # Sample input from the problem statement
    features = [[1.0, 2.0], [2.0, 1.0], [-1.0, -2.0]]
    labels = [1, 0, 0]
    initial_weights = [0.1, -0.2]
    initial_bias = 0.0
    learning_rate = 0.1
    epochs = 2
    print(train_neuron(features, labels, initial_weights, initial_bias, learning_rate, epochs))
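
As a quick sanity check (not part of the submission), the analytic gradient can be compared against a central-difference approximation of the loss. The helpers `mse_loss` and `numeric_weight_gradient` below are illustrative names of my own, assuming the `sigmoid` function and `numpy` import from the listing above:

# Finite-difference check of the analytic MSE gradient (illustrative only)
def mse_loss(weights, bias, features, labels):
    preds = sigmoid(features.dot(weights) + bias)
    return np.mean((preds - labels) ** 2)

def numeric_weight_gradient(weights, bias, features, labels, eps=1e-6):
    grad = np.zeros_like(weights)
    for j in range(len(weights)):
        w_hi, w_lo = weights.copy(), weights.copy()
        w_hi[j] += eps
        w_lo[j] -= eps
        # Central difference: (L(w + eps) - L(w - eps)) / (2 * eps)
        grad[j] = (mse_loss(w_hi, bias, features, labels)
                   - mse_loss(w_lo, bias, features, labels)) / (2 * eps)
    return grad

X = np.array([[1.0, 2.0], [2.0, 1.0], [-1.0, -2.0]])
y = np.array([1, 0, 0])
w = np.array([0.1, -0.2])
p = sigmoid(X.dot(w) + 0.0)
analytic = 2 * np.dot(X.T, (p - y) * p * (1 - p)) / len(y)
print(np.allclose(analytic, numeric_weight_gradient(w, 0.0, X, y)))  # expect True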
