# Gradient descent
# Minimize the objective J(θ): initialize θ (randomly, or to 0) and step along
# the negative gradient so that each update makes J(θ) smaller; the learning
# rate is typically scheduled from large to small.
import numpy as np
import matplotlib.pyplot as plt
# Load the dataset: column 0 is the input feature, column 1 is the target.
data = np.genfromtxt("data.csv", delimiter=",")
x_data = data[:,0]
y_data = data[:,1]
lr = 0.01  # learning rate
b = 0  # intercept of the fitted line, initialized to 0
k = 0  # slope of the fitted line, initialized to 0
epochs = 50  # number of gradient-descent iterations
def eroor(x_data, y_data, b, k):
    """Return the mean squared error of the line y = k*x + b over the data.

    Args:
        x_data: sequence of input values.
        y_data: sequence of target values, same length as x_data.
        b: intercept of the line.
        k: slope of the line.
    """
    squared_residuals = ((k * x + b - y) ** 2 for x, y in zip(x_data, y_data))
    return sum(squared_residuals) / float(len(x_data))
def gradient(x_data, y_data, b, k, lr, epochs):
    """Fit y = k*x + b by batch gradient descent.

    Each epoch computes the mean residual-based gradients over the whole
    dataset, then takes one step of size `lr` against them.

    Args:
        x_data: sequence of input values.
        y_data: sequence of target values, same length as x_data.
        b: initial intercept.
        k: initial slope.
        lr: learning rate (step size).
        epochs: number of full-dataset update steps.

    Returns:
        Tuple (b, k) of the fitted intercept and slope.
    """
    n = float(len(x_data))
    for _ in range(epochs):
        b_grd = 0
        k_grd = 0
        for j in range(len(x_data)):
            # Bug fix: the original indexed the samples with the epoch
            # counter `i` instead of the sample index `j`.
            residual = k * x_data[j] + b - y_data[j]
            b_grd += residual / n
            k_grd += residual * x_data[j] / n
        b = b - lr * b_grd
        # Bug fix: the original updated k with b_grd instead of k_grd.
        k = k - lr * k_grd
    return b, k
# Run gradient descent, then plot the data points (blue dots) together with
# the fitted regression line (red).
b,k=gradient(x_data,y_data,b,k,lr,epochs)
plt.plot(x_data, y_data, 'b.')
plt.plot(x_data, k*x_data + b, 'r')
plt.show()