# Manual linear regression with gradient descent (numpy + matplotlib demo)
"""
线性回归 使用 梯度下降方式
实现线性回归
"""
import numpy as np
import sklearn.preprocessing as sp
import matplotlib.pyplot as mp
from mpl_toolkits.mplot3d import Axes3D
# Training samples: five (x, y) pairs for fitting y = w1*x + w0.
train_x = np.array([0.5, 0.6, 0.8, 1.1, 1.4])
train_y = np.array([5.0, 5.5, 6.0, 6.8, 7.0])
# Parameter histories (w0, w1), per-epoch losses, and epoch indices.
w0, w1 = [1], [1]
losses, epoch = [], []
times = 1000  # total number of gradient-descent iterations
lr = 0.01     # learning rate
# Batch gradient descent: iteratively refine w0 (intercept) and w1 (slope).
for step in range(1, times + 1):
    epoch.append(step)
    # Residuals of the current model on the whole training set.
    residual = w0[-1] + w1[-1] * train_x - train_y
    # Sum-of-squares loss for this iteration (logged for the progress plots).
    loss = (residual ** 2).sum()
    print('{:4}> w0={:.8f}, w1={:.8f}, loss={:.8f}'.format(
        step, w0[-1], w1[-1], loss))
    losses.append(loss)
    # Gradient components — uses the ½·SSE convention, hence no factor of 2.
    d0 = residual.sum()                # d(loss/2)/d(w0)
    d1 = (train_x * residual).sum()    # d(loss/2)/d(w1)
    # Step against the gradient, scaled by the learning rate.
    w0.append(w0[-1] - lr * d0)
    w1.append(w1[-1] - lr * d1)
print("W0:", w0[-1])
print("W1:", w1[-1])
# Build the fitted regression line from the final parameters.
line_x = np.linspace(np.min(train_x), np.max(train_x), 500)
line_y = line_x * w1[-1] + w0[-1]  # y = w1*x + w0
# Plot the training samples together with the fitted regression line.
# (Fixed "Linger Regression" typo in the window name and title.)
mp.figure("Linear Regression", facecolor="lightgray")
mp.title("Linear Regression", fontsize=18)
mp.grid(linestyle=":")
mp.scatter(train_x, train_y, s=80, marker="o", color="b", label="samples")
mp.plot(line_x, line_y, color="r", label="Regression", linewidth=3)
mp.legend()
# Plot how w0, w1 and the loss evolve over the training epochs,
# stacked as three rows of one figure.
mp.figure("Training Progress", facecolor="lightgray")
mp.title("Training Progress", fontsize=18)
# (subplot position, y-label, series, line color, legend label);
# w0/w1 drop their final element so lengths match the epoch list.
panels = (
    (311, r"$w_0$", w0[:-1], "b", r"$w_0$"),
    (312, r"$w_1$", w1[:-1], "orange", r"$w_1$"),
    (313, "loss", losses, "r", "loss"),
)
for position, ylab, series, colour, lab in panels:
    mp.subplot(position)
    mp.grid(linestyle=":")
    mp.ylabel(ylab, fontsize=14)
    mp.plot(epoch, series, color=colour, label=lab)
    mp.legend()
mp.tight_layout()
# Sample the loss surface over a grid of (w0, w1) candidate parameters.
n = 500
w0_grid, w1_grid = np.meshgrid(
    np.linspace(0, 9, n),
    np.linspace(0, 3.5, n))
loss_grid = 0
for x, y in zip(train_x, train_y):
    # Same sum-of-squares loss as the training loop (previously this had a
    # stray /2, so the recorded `losses` trajectory floated off the surface).
    loss_grid += (w0_grid + w1_grid * x - y) ** 2
# Draw the loss surface and the parameter trajectory taken by BGD.
fig = mp.figure("Loss", facecolor="lightgray")
# Axes3D(fig) stopped attaching the axes to the figure in matplotlib >= 3.4;
# add_subplot(projection="3d") is the supported way to create a 3-D axes.
ax3d = fig.add_subplot(projection="3d")
ax3d.set_title("3D Surface", fontsize=18)
ax3d.set_xlabel("w0")
ax3d.set_ylabel("w1")
ax3d.set_zlabel("loss")
ax3d.tick_params(labelsize=10)
ax3d.plot_surface(w0_grid, w1_grid, loss_grid, cstride=30, rstride=30,
                  cmap="jet", alpha=0.3)
ax3d.plot(w0[:-1], w1[:-1], losses, "o-", color="orangered", label='BGD')
# 2-D view of the same loss surface: filled and labelled contour levels,
# with the full BGD parameter path overlaid on top.
mp.figure('Batch Gradient Descent', facecolor='lightgray')
mp.title('Batch Gradient Descent', fontsize=20)
mp.xlabel('x', fontsize=14)
mp.ylabel('y', fontsize=14)
mp.tick_params(labelsize=10)
mp.grid(linestyle=':')
# Filled background contours, then black line contours annotated with values.
mp.contourf(w0_grid, w1_grid, loss_grid, 10, cmap='jet')
contour_set = mp.contour(w0_grid, w1_grid, loss_grid, 10,
                         colors='black', linewidths=0.5)
mp.clabel(contour_set, inline_spacing=0.1, fmt='%.2f', fontsize=8)
# Descent path in parameter space, from the initial (1, 1) to the optimum.
mp.plot(w0, w1, 'o-', c='orangered', label='BGD')
mp.legend()
mp.tight_layout()
mp.show()