The principle of linear regression:

Loss Function (mean squared error over the N sample points):

    L(w, b) = (1/N) * Σ_i (w * x_i + b - y_i)^2

Gradient Update:

    ∂L/∂w = (2/N) * Σ_i (w * x_i + b - y_i) * x_i
    ∂L/∂b = (2/N) * Σ_i (w * x_i + b - y_i)
    w ← w - lr * ∂L/∂w
    b ← b - lr * ∂L/∂b

Here lr is the learning rate, and the gradients of w and b are simply the derivatives of the loss function with respect to each parameter (the derivatives are worked out by hand and plugged into the update formulas above; they can also be computed symbolically with diff).
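The hand-derived gradients can be double-checked symbolically; a minimal standalone sketch using sympy.diff on the per-point squared error (sympy is assumed to be installed here, and is not needed by the code below):

    import sympy

    w, b, x, y = sympy.symbols("w b x y")
    loss = (w * x + b - y) ** 2                  # squared error for a single point
    print(sympy.factor(sympy.diff(loss, w)))     # 2*x*(b + w*x - y)
    print(sympy.factor(sympy.diff(loss, b)))     # 2*(b + w*x - y)

Averaging these per-point derivatives over the N samples gives the (2/N) * Σ expressions above.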
Code outline:
Generate fake data
Compute the predicted y
Compute the loss, then compute the gradients and update the parameters
Plot the results

Code:
import numpy as np
import matplotlib.pyplot as plt

def get_fake_data(n, show=False):
    # Sample n points from y = 0.5x + Gaussian noise, with x drawn from [0, 20).
    X = np.random.rand(n) * 20
    noise = np.random.randn(n)
    y = 0.5 * X + noise
    if show:
        plt.scatter(X, y)
        plt.show()
    return X, y
def count_y_prediction(X, w, b):
    # Model prediction: y_pred = w * x + b.
    y_pred = w * X + b
    return y_pred
def compute_error_for_given_points(y, y_pred):
    # Mean squared error: (1/N) * sum((y - y_pred)^2).
    error = ((y - y_pred) ** 2).sum() / len(y)
    return error
def compute_gradient_and_update(X, y, w, b, lr):
    # Accumulate dL/dw and dL/db over all points, then take one gradient-descent step.
    w_gradient = 0.0
    b_gradient = 0.0
    N = len(X)
    for i in range(N):
        w_gradient += 2 * (w * X[i] + b - y[i]) * X[i]
        b_gradient += 2 * (w * X[i] + b - y[i])
    w = w - lr * w_gradient / N
    b = b - lr * b_gradient / N
    return w, b
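# A vectorized equivalent of the loop above (a sketch; the function name is new
# here and nothing below calls it). Same math, computed array-at-a-time:
# residual r = w*X + b - y, dL/dw = mean(2*r*X), dL/db = mean(2*r).
def compute_gradient_and_update_vectorized(X, y, w, b, lr):
    residual = w * X + b - y
    w_gradient = (2 * residual * X).mean()
    b_gradient = (2 * residual).mean()
    return w - lr * w_gradient, b - lr * b_gradient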
def linearRegression(X, y, w, b, i, lr=0.001):
    # One training step: predict, report the loss, update w and b, redraw the fit.
    y_pred = count_y_prediction(X, w, b)
    error = compute_error_for_given_points(y, y_pred)
    print("iter %d, loss: %f" % (i, error))
    w, b = compute_gradient_and_update(X, y, w, b, lr)
    y_pred = count_y_prediction(X, w, b)
    draw(X, y, y_pred)
    return w, b
def draw(X, y, y_pred, pause=True):
    # Scatter the data and overlay the current fitted line. With pause=True this
    # acts as one animation frame; with pause=False it blocks and shows the figure.
    plt.clf()
    plt.scatter(X, y, c="blue")
    plt.plot(X, y_pred, c="red")  # red so the line stands out against the points
    if pause:
        plt.pause(0.2)
    else:
        plt.show()
if __name__ == "__main__":
    num_points = 50
    X, y = get_fake_data(num_points)
    w = np.random.randn(1)
    b = np.zeros(1)
    plt.ion()  # interactive mode so each draw() call animates one frame
    for i in range(30):
        w, b = linearRegression(X, y, w, b, i)
    plt.ioff()
    y_pred = count_y_prediction(X, w, b)
    draw(X, y, y_pred, pause=False)
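As a quick sanity check, the learned parameters can be compared against NumPy's closed-form least-squares fit; a sketch that could be appended to the end of the main block (np.polyfit with degree 1 returns the slope and intercept):

    w_ls, b_ls = np.polyfit(X, y, 1)
    print("gradient descent: w=%.3f, b=%.3f" % (w[0], b[0]))
    print("least squares:    w=%.3f, b=%.3f" % (w_ls, b_ls))

Both slopes should land near the true 0.5; the intercept moves more slowly under gradient descent, so after only 30 iterations it may still differ noticeably from the least-squares value.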
Results: the loss printed each iteration drops steadily, and the final plot shows the fitted line settling close to the data generated from y = 0.5x + noise.