import numpy as np
import matplotlib.pyplot as plt
# Load the (x, y) samples from a comma-separated file: column 0 is x, column 1 is y.
data = np.genfromtxt('data.csv', delimiter=',')
x_data, y_data = data[:, 0], data[:, 1]

# Quick look at the raw points before fitting.
plt.scatter(x_data, y_data)
plt.show()

# Simple linear model y = k*x + b: slope k, intercept b, both starting at 0.
k, b = 0, 0
# Gradient-descent hyper-parameters: learning rate and number of iterations.
lr, epochs = 0.001, 50
def gradient_descent_runner(x_data, y_data, b, k, lr, epochs):
    """Fit the line y = k*x + b to (x_data, y_data) by batch gradient descent.

    Parameters
    ----------
    x_data, y_data : array-like of equal length — the training samples.
    b, k : float — initial intercept and slope.
    lr : float — learning rate (step size).
    epochs : int — number of full-batch update steps.

    Returns
    -------
    (b, k) : the fitted intercept and slope.
    """
    # Work on float arrays so the gradient is one vectorized expression
    # instead of a Python loop over every sample per epoch.
    x = np.asarray(x_data, dtype=float)
    y = np.asarray(y_data, dtype=float)
    m = float(len(x))
    for _ in range(epochs):
        # Residuals of the current line on every sample at once.
        error = (k * x + b) - y
        # Mean gradients of the squared-error cost w.r.t. b and k.
        b_grad = error.sum() / m
        k_grad = (x * error).sum() / m
        # Step both parameters downhill.
        b = b - lr * b_grad
        k = k - lr * k_grad
    return b, k
# Run the optimizer, then overlay the fitted line on the raw samples.
b, k = gradient_descent_runner(x_data, y_data, b, k, lr, epochs)
fitted_line = k * x_data + b
plt.plot(x_data, y_data, 'b.')
plt.plot(x_data, fitted_line, 'r')
plt.show()
# 感谢观看 期待关注
# 关注我,带你一起写bug
# WARNING: 未经授权,不得转载
# 有问题的小伙伴请在下方留言,喜欢就点个赞吧