线性回归01
1.概念(想尽办法得到一条直线,使得这条直线在下面这些点的拟合度最高)
下图的平方是为了得到误差的最小值(梯度下降算法,求导),加上1/2是为了求导好约掉2便于计算,当然1/2也可以不加。然后将所有误差求和,得到总样本误差,最后得到loss损失函数。
loss(w0,w1)
图形代码
import numpy as np
import matplotlib.pyplot as mp
from mpl_toolkits.mplot3d import axes3d

# Training samples: inputs xs and target outputs ys.
xs = np.array([0.5, 0.6, 0.8, 1.1, 1.4])
ys = np.array([5.0, 5.5, 6.0, 6.8, 7.0])

# Build a 500x500 grid of candidate (w0, w1) parameter pairs.
n = 500
w0_grid, w1_grid = np.meshgrid(np.linspace(-3, 10, n),
                               np.linspace(-3, 10, n))

# Total squared-error loss over all samples, evaluated at every (w0, w1)
# grid point.  The 1/2 factor exists so the 2 from differentiation cancels.
loss = 0.0
for x, y in zip(xs, ys):
    loss = loss + 1 / 2 * (w0_grid + w1_grid * x - y) ** 2

# Render the loss surface in 3D.
mp.figure('Loss Function', facecolor='lightgray')
ax3d = mp.axes(projection='3d')
ax3d.set_xlabel('w0')
ax3d.set_ylabel('w1')
ax3d.set_zlabel('loss')
ax3d.plot_surface(w0_grid, w1_grid, loss, cstride=30, rstride=30, cmap='jet')
mp.show()
梯度下降
(根据学习率(lrate)决定每次移动的步长,逐步逼近最小值点),w0 为一个 x 方向上的变量。多维情况下对各个参数分别求偏导数,沿负梯度方向更新。
00000000000000
import numpy as np
from matplotlib import pyplot as plt
from mpl_toolkits.mplot3d import axes3d
# xs = np.array([0.5,0.6,0.8,1.1,1.4])
# ys = np.array([5.0,5.5,6.0,6.8,7.0])
# # 将w0和w1拆分成500个点网格
# n = 500;
# w0_grid,w1_grid = np.meshgrid(np.linspace(-3,10,n),
# np.linspace(-3,10,n))
# # print(w0_grid) 500x500
# loss = 0.;
# # 得到损失函数,
# for x,y in zip(xs,ys):
# loss = loss + 1/2 * (w0_grid + w1_grid * x - y)**2
#
# # 画图三维
# plt.figure('LOOS Function',facecolor='lightgray')
# ax3d = plt.axes(projection = '3d')
# ax3d.set_xlabel('w0')
# ax3d.set_ylabel('w1')
# ax3d.set_zlabel('loss')
# ax3d.plot_surface(w0_grid,w1_grid,loss,cstride=30,rstride=30,cmap='jet')
# plt.show()
# 训练数据
train_x = np.array([0.5,0.6,0.8,1.1,1.4])
train_y = np.array([5.0,5.5,6.0,6.8,7.0])
# 给定初始值
w0 = 1 ; w1 = 1;
# 迭代次数,也就是下降次数
times = 1000
# 学习率
lrate = 0.01
for i in range(1,times + 1):
# w0与w1的偏导为d0,d1
d0 = (w0 + w1 * train_x - train_y).sum()
d1 = (train_x *(w0 + w1 * train_x - train_y)).sum()
# 根据梯度下降的公式,更新w0与w1
w0 = w0 - lrate * d0
w1 = w1 - lrate * d1
print("w0:",w0)
print("w1:",w1)
# 通过w0与我的模型参数,绘制回归线 y =w1x+w0,矩阵相乘
linex = np.linspace(train_x.min(),train_x.max(),100)
liney = w1*linex + w0
print(linex)
print(liney)
# 画训练集的散点图
plt.figure('Linear Regression',facecolor='lightgray')
plt.title('Linear Regression',fontsize=18)
plt.grid(linestyle = ':')
plt.scatter(train_x,train_y,s=80,marker='o',color = 'dodgerblue',label='Samples')
plt.plot(linex,liney,color='orangered',linewidth =2,label='Regression Line')
plt.legend()
plt.show()
结果
线性回归2
分析上述算法中 w0、w1 以及 loss 损失函数随迭代次数的变化
画出参数评估图
# Diagnostic plots: how w0, w1 and the loss evolve over the training epochs.
# NOTE(review): relies on `epoches`, `w0s`, `w1s` and `loss_s` being collected
# during the training loop in code not shown here; the [0:-1] slice suggests
# w0s/w1s carry one extra (initial) entry — confirm against that loop.
plt.subplot(311)
plt.grid(linestyle=':')
# Label typo fixed: r'$w+0$' rendered literally as "w+0" instead of a subscript.
plt.ylabel(r'$w_0$', fontsize=14)
plt.plot(epoches, w0s[0:-1], color='red')

plt.subplot(312)
plt.grid(linestyle=':')
plt.ylabel(r'$w_1$', fontsize=14)
plt.plot(epoches, w1s[0:-1], color='yellow')

plt.subplot(313)
plt.grid(linestyle=':')
plt.ylabel(r'$loss$', fontsize=14)
plt.plot(epoches, loss_s, color='blue')
plt.show()