import numpy as np
def gradien_desc(theta, X, y, alpha, iters):
    """Batch gradient descent for linear least squares.

    Parameters
    ----------
    theta : array_like, shape (n,)
        Initial coefficient vector. Not modified: a float copy is made,
        so integer inputs also work and the caller's array is untouched.
    X : ndarray, shape (n, m)
        Design matrix laid out features-by-samples (one column per
        sample), so predictions are ``theta @ X``.
    y : ndarray, shape (m,)
        Target values, one per sample.
    alpha : float
        Learning rate.
    iters : int
        Number of gradient-descent iterations.

    Returns
    -------
    theta : ndarray, shape (n,)
        Coefficients after the final update.
    theta_his : ndarray, shape (iters, n)
        Coefficients recorded *after* each update.
    loss_his : ndarray, shape (iters,)
        Loss ``1/(2m) * sum((y_hat - y)**2)`` evaluated with the
        coefficients *before* each update (same values the original
        recorded, since its ``y_hat`` predated the update).
    """
    # Work on a float copy so the in-place update below cannot mutate
    # the caller's array or fail on an integer dtype.
    theta = np.asarray(theta, dtype=float).copy()
    m = len(y)
    n = len(theta)  # generalized: the original hard-coded 2 coefficients
    loss_his = np.zeros(iters)
    theta_his = np.zeros((iters, n))
    for i in range(iters):
        y_hat = np.dot(theta, X)  # predictions at the current theta
        loss_his[i] = np.sum(np.square(y_hat - y)) / (2 * m)
        # Gradient of the loss w.r.t. theta: (1/m) * (y_hat - y) @ X.T
        theta -= alpha / m * np.dot(y_hat - y, X.T)
        theta_his[i, :] = theta  # record the post-update coefficients
    return theta, theta_his, loss_his
Gradient descent implemented in Python
Latest recommended article published 2023-07-30 20:53:44