This is the Gradient Descent demo from 李宏毅 (Hung-yi Lee)'s video lecture Regression: Case Study, provided here for reference.
```python
import matplotlib.pyplot as plt
import numpy as np
# The two imports above do not appear in the demo shown in the video, but they are required.

# Data
x_data = [338., 333., 328., 207., 226., 25., 179., 60., 208., 606.]
y_data = [640., 633., 619., 393., 428., 27., 193., 66., 226., 1591.]

# Model: ydata = b + w * xdata
x = np.arange(-200, -100, 1)  # bias
y = np.arange(-5, 5, 0.1)     # weight
Z = np.zeros((len(x), len(y)))
X, Y = np.meshgrid(x, y)
# Loss surface: mean squared error at every (b, w) grid point
for i in range(len(x)):
    for j in range(len(y)):
        b = x[i]
        w = y[j]
        Z[j][i] = 0
        for n in range(len(x_data)):
            Z[j][i] = Z[j][i] + (y_data[n] - b - w * x_data[n]) ** 2
        Z[j][i] = Z[j][i] / len(x_data)

# ydata = b + w * xdata
b = -120        # initial b
w = -4          # initial w
lr = 0.0000001  # learning rate
# lr = 1        # with Adagrad, lr can be set to 1
iteration = 100000  # number of iterations

# Store initial values for plotting
b_history = [b]
w_history = [w]

lr_b = 0
lr_w = 0

# Iterations
for i in range(iteration):
    b_grad = 0.0
    w_grad = 0.0
    for n in range(len(x_data)):
        b_grad = b_grad - 2.0 * (y_data[n] - b - w * x_data[n]) * 1.0
        w_grad = w_grad - 2.0 * (y_data[n] - b - w * x_data[n]) * x_data[n]

    # Update parameters (vanilla gradient descent)
    b = b - lr * b_grad
    w = w - lr * w_grad

    # Adagrad: uncomment the four lines below and comment out the two updates above
    # lr_b = lr_b + b_grad ** 2
    # lr_w = lr_w + w_grad ** 2
    # b = b - lr / np.sqrt(lr_b) * b_grad
    # w = w - lr / np.sqrt(lr_w) * w_grad

    # Store parameters for plotting
    b_history.append(b)
    w_history.append(w)

# Plot the loss contours, the optimum (orange x), and the descent trajectory (black)
plt.contourf(x, y, Z, 50, alpha=0.5, cmap=plt.get_cmap('jet'))
plt.plot([-188.4], [2.67], 'x', ms=12, markeredgewidth=3, color='orange')
plt.plot(b_history, w_history, 'o-', ms=3, lw=1.5, color='black')
# xlim / ylim set the axis ranges
plt.xlim(-200, -100)
plt.ylim(-5, 5)
plt.xlabel(r'$b$', fontsize=16)
plt.ylabel(r'$w$', fontsize=16)
plt.show()
```
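The update loop performs gradient descent on the total squared error; the contour variable Z divides by the number of points to show the mean squared error instead, which only rescales the surface and does not move the minimum. For reference, the loss and the gradients that the inner loop accumulates are:

$$
L(b, w) = \sum_{n}\bigl(y^{(n)} - (b + w\,x^{(n)})\bigr)^2
$$

$$
\frac{\partial L}{\partial b} = -2\sum_{n}\bigl(y^{(n)} - (b + w\,x^{(n)})\bigr),\qquad
\frac{\partial L}{\partial w} = -2\sum_{n}\bigl(y^{(n)} - (b + w\,x^{(n)})\bigr)\,x^{(n)}
$$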
First, run the demo with the learning rate as given in the code, lr = 0.0000001.
Now increase the learning rate to lr = 0.000001, changing the line in the code to lr = 0.000001  # learning rate.
Increase it once more to lr = 0.00001  # learning rate; the learning rate is now too large, and the trajectory escapes the axis range we set for the plot.
Finally, use Adagrad: uncomment the # Adagrad block in the code (and comment out the two plain updates under # Update parameters). With Adagrad, the learning rate can be set to lr = 1.
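If it helps to see the Adagrad variant in one piece, here is a minimal sketch with those lines enabled. It reuses x_data, y_data, and numpy from the demo above; only the parameter update differs from the plain gradient-descent loop.

```python
# Minimal Adagrad sketch (assumes x_data, y_data, and numpy as np from the demo above)
b, w = -120.0, -4.0    # same starting point as the demo
lr = 1.0               # Adagrad tolerates a much larger base learning rate
lr_b, lr_w = 0.0, 0.0  # accumulated squared gradients
b_history, w_history = [b], [w]

for i in range(100000):
    b_grad = sum(-2.0 * (y_data[n] - b - w * x_data[n]) for n in range(len(x_data)))
    w_grad = sum(-2.0 * (y_data[n] - b - w * x_data[n]) * x_data[n] for n in range(len(x_data)))

    # Adagrad: scale each step by the root of that parameter's accumulated
    # squared gradients, giving every parameter its own shrinking step size
    lr_b += b_grad ** 2
    lr_w += w_grad ** 2
    b -= lr / np.sqrt(lr_b) * b_grad
    w -= lr / np.sqrt(lr_w) * w_grad

    b_history.append(b)
    w_history.append(w)
```

Plotted with the same contour code as above, this trajectory should stay inside the axis range and approach the optimum marked at (-188.4, 2.67), which is the point of this final experiment.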
That is all. Thank you!