# cost     = 1/(2m) * sum((p - y)^2)        (mean-squared-error cost)
# gradient = (1/m)  * X^T (p - y)
# Note: keep this (linear regression) distinct from logistic regression.
import numpy as np
# Reproducible toy data set: 10 samples, 2 features.
np.random.seed(10)
x_data = np.random.rand(10, 2)
m, n = x_data.shape  # sample count, feature count
# NOTE(review): the target is the LAST FEATURE COLUMN of x_data, so the
# regression is degenerate — the exact solution is w = [0, 1]. Presumably
# intentional for a demo; confirm if real data is substituted.
y_data = x_data[:, -1]
theta = np.ones(n)  # initial weight vector
def gradientDescent(iter, x, y, w, lr):
    """Fit linear-regression weights by batch gradient descent.

    Parameters
    ----------
    iter : int
        Number of gradient-descent iterations. (Name shadows the builtin
        ``iter``; kept for interface compatibility with existing callers.)
    x : ndarray, shape (m, n)
        Design matrix, one sample per row.
    y : ndarray, shape (m,)
        Target values.
    w : ndarray, shape (n,)
        Initial weight vector (not mutated; a new array is returned).
    lr : float
        Learning rate.

    Returns
    -------
    ndarray, shape (n,)
        The weight vector after ``iter`` updates.
    """
    # Derive the sample count from the input instead of relying on the
    # module-level global ``m`` (bug: the original broke for any x whose
    # row count differed from the global).
    m = x.shape[0]
    x_train = x.transpose()
    for i in range(0, iter):
        pre = np.dot(x, w)
        loss = pre - y
        gradient = np.dot(x_train, loss) / m
        w = w - lr * gradient
        # Bug fix: the original computed 1.0/2 * m * sum(...), which scales
        # by m/2 due to operator precedence. The MSE cost matching the
        # gradient above is sum((Xw - y)^2) / (2m). (np.transpose(w) was a
        # no-op on a 1-D vector and has been dropped.)
        cost = np.sum(np.square(np.dot(x, w) - y)) / (2 * m)
        print("第{}次梯度下降损失为: {}".format(i, round(cost, 2)))
    return w
# Train on the toy data and report the fitted weight vector.
result = gradientDescent(1000, x_data, y_data, theta, 0.01)
# In-sample predictions (computed but not printed).
y_pre = x_data.dot(result)
print("线性回归模型 w: ", result)