BP Neural Network in Python

This post implements a small two-layer BP (backpropagation) network from scratch with NumPy: a ReLU hidden layer, a ReLU output, mean-squared-error loss, and hand-written gradients, trained on a synthetic linear-regression task.

import numpy as np
import matplotlib.pyplot as plt

def relu(x):
    return np.maximum(0, x)

def relu_derivative(x):
    return np.where(x > 0, 1, 0)
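
# Note: ReLU is not differentiable at exactly 0; like most frameworks,
# the subgradient there is taken to be 0 (the x > 0 test above).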

def init_data(x_dim, pam, x_test_dim):
    # Draw training and test inputs uniformly from [0, 1).
    X = np.random.uniform(0.0, 1.0, x_dim)
    X_test = np.random.uniform(0.0, 1.0, x_test_dim)
    # Targets are a noiseless linear function of the inputs: Y = X @ pam.
    Y = X.dot(np.array(pam))
    Y = np.expand_dims(Y, axis=0).T  # column vector, shape (N, 1)
    Y_test = X_test.dot(np.array(pam))
    Y_test = np.expand_dims(Y_test, axis=0).T
    return X, Y, X_test, Y_test

def initialize(input_size, hidden_size, output_size):
    W1 = np.random.randn(input_size, hidden_size)
    b1 = np.zeros(hidden_size)
    W2 = np.random.randn(hidden_size, output_size)
    b2 = np.zeros(output_size)
    return W1, b1, W2, b2
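
# Note (an optional tweak, not in the original post): for ReLU networks,
# He initialization often trains more stably than plain randn, e.g.
#   W1 = np.random.randn(input_size, hidden_size) * np.sqrt(2.0 / input_size)
#   W2 = np.random.randn(hidden_size, output_size) * np.sqrt(2.0 / hidden_size)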

def forward(X, W1, b1, W2, b2):
    Z1 = np.dot(X, W1) + b1
    A1 = relu(Z1)
    Z2 = np.dot(A1, W2) + b2
    A2 = relu(Z2)
    return Z1, A1, Z2, A2
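
# Shape sketch (assuming X has shape (N, input_size)):
#   Z1, A1: (N, hidden_size);  Z2, A2: (N, output_size).
# The output layer also passes through ReLU; this only works here because
# the targets Y = X @ pam are non-negative.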

def loss(Y, Y_pred):
    # Mean squared error: L = (1/N) * sum((Y - Y_pred) ** 2)
    return np.mean((Y - Y_pred) ** 2)

def backpropagation(X, Y, Z1, A1, Z2, A2, W1, b1, W2, b2, learning_rate):
    N = X.shape[0]

    # Output layer: dL/dZ2 for L = mean((A2 - Y)**2) with A2 = relu(Z2).
    delta2 = 2 * (A2 - Y) * relu_derivative(Z2) / N
    dW2 = np.dot(A1.T, delta2)
    db2 = np.sum(delta2, axis=0)

    # Hidden layer: propagate delta2 back through W2 and the hidden ReLU.
    delta1 = np.dot(delta2, W2.T) * relu_derivative(Z1)
    dW1 = np.dot(X.T, delta1)
    db1 = np.sum(delta1, axis=0)

    # Gradient-descent update.
    W1 -= learning_rate * dW1
    b1 -= learning_rate * db1
    W2 -= learning_rate * dW2
    b2 -= learning_rate * db2

    return W1, b1, W2, b2
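
# Derivation sketch (chain rule; matches backpropagation() above):
#   L = (1/N) * sum((A2 - Y)**2),  A2 = relu(Z2),  Z2 = A1 @ W2 + b2,
#   A1 = relu(Z1),                 Z1 = X @ W1 + b1
#   delta2 = dL/dZ2 = (2/N) * (A2 - Y) * relu'(Z2)
#   dL/dW2 = A1.T @ delta2,        dL/db2 = column sums of delta2
#   delta1 = dL/dZ1 = (delta2 @ W2.T) * relu'(Z1)
#   dL/dW1 = X.T @ delta1,         dL/db1 = column sums of delta1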

def train(input_size, output_size, X, Y, hidden_size, learning_rate, epochs):
    W1, b1, W2, b2 = initialize(input_size, hidden_size, output_size)
    # Make the biases row vectors so they broadcast over the batch.
    b1 = np.expand_dims(b1, axis=0)
    b2 = np.expand_dims(b2, axis=0)
    loss_list = []
    for epoch in range(epochs):
        # Full-batch gradient descent: every epoch uses all training samples.
        Z1, A1, Z2, A2 = forward(X, W1, b1, W2, b2)
        loss1 = loss(Y, A2)
        W1, b1, W2, b2 = backpropagation(X, Y, Z1, A1, Z2, A2, W1, b1, W2, b2, learning_rate)
        print("Finished epoch {}".format(epoch))
        loss_list.append(loss1)

    return W1, b1, W2, b2, loss_list

def predict(X_test, Y_test, W1, b1, W2, b2):
    Z1, A1, Z2, A2 = forward(X_test, W1, b1, W2, b2)
    loss1 = loss(Y_test, A2)
    print("Test loss:", loss1)
    return A2

x_dim = (30, 3)
x_test_dim = (5, 3)
pam = [1.3, 0.5, 1.5]
X, Y, X_test, Y_test = init_data(x_dim, pam, x_test_dim)
# Train the neural network
hidden_size = 25
learning_rate = 0.01
epochs = 100
input_size = x_dim[1]
output_size = 1
W1, b1, W2, b2, loss_list = train(input_size, output_size, X, Y, hidden_size, learning_rate, epochs)
# Plot the loss curve
plt.figure()
x = np.arange(0, epochs)
plt.title("loss")
plt.plot(x, loss_list)
plt.show()
# Predict on the test set
predictions = predict(X_test, Y_test, W1, b1, W2, b2)
print("预测值与真实值的差值:\n", predictions - Y_test)

# Sample output

Test loss: 0.13031508881235027
Difference between predictions and ground truth:
 [[0.10231436]
 [0.29744002]
 [0.30471985]
 [0.17542227]
 [0.65498816]]
 

# Loss curve

(figure: training loss over the 100 epochs)

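
# Gradient check (optional)

As a sanity check on the analytic gradients, a finite-difference comparison is useful. The sketch below is not part of the original post: it assumes the forward, loss, initialize, and relu_derivative functions defined above, and the hypothetical helper numerical_grad_W2 checks only the dW2 term.

def numerical_grad_W2(X, Y, W1, b1, W2, b2, eps=1e-6):
    # Central finite differences on each entry of W2.
    grad = np.zeros_like(W2)
    for i in range(W2.shape[0]):
        for j in range(W2.shape[1]):
            W2[i, j] += eps
            loss_plus = loss(Y, forward(X, W1, b1, W2, b2)[3])
            W2[i, j] -= 2 * eps
            loss_minus = loss(Y, forward(X, W1, b1, W2, b2)[3])
            W2[i, j] += eps  # restore the original value
            grad[i, j] = (loss_plus - loss_minus) / (2 * eps)
    return grad

# Compare with the analytic dW2 used in backpropagation().
W1c, b1c, W2c, b2c = initialize(input_size, hidden_size, output_size)
b1c = np.expand_dims(b1c, axis=0)
b2c = np.expand_dims(b2c, axis=0)
Z1, A1, Z2, A2 = forward(X, W1c, b1c, W2c, b2c)
delta2 = 2 * (A2 - Y) * relu_derivative(Z2) / X.shape[0]
analytic = np.dot(A1.T, delta2)
numeric = numerical_grad_W2(X, Y, W1c, b1c, W2c, b2c)
# ReLU kinks can cause small disagreements for entries with Z near 0.
print("max |analytic - numeric|:", np.abs(analytic - numeric).max())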