import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
# Load the advertising data set used by the SGD demo below.
for_mat = 'Advertising.csv'
advertising = pd.read_csv(for_mat)
# The original bare `advertising.head()` discarded its result (a no-op
# outside a notebook); print it so the preview is actually shown.
print(advertising.head())
# Single training sample (x, y) used by the 1-D gradient-descent demos.
x, y = 0.5, 0.8
"""绘制RSS关于w的函数图像"""
def plo_t():
w = 3
pred = x * w
rss = ((pred - y) ** 2) / 2
grad = (pred - y) * x
print("当w=3时,预测值为" + str(pred))
print("当w=3时,残差平方和为" + str(rss))
print("当w=3时,RSS(w)的梯度为" + str(grad))
w_vec = np.linspace(-1, 4, 100)
rss_vec = []
for w_tmp in w_vec:
rss_tmp = (y - x * w_tmp) ** 2 / 2
rss_vec.append(rss_tmp)
"""
画出残差平方和随着权重变化的曲线
当w=3时,画出RSS(w)的斜率
"""
plt.plot(w_vec, rss_vec)
# 画出w=3时对应RSS的散点图
plt.scatter(w, rss, s=100, c="y", marker="o")
# 通过当w=3时的切线
plt.plot(np.linspace(2.5, 3.5, 50), np.linspace(2.5, 3.5, 50) * 0.35 - 0.805, "--", linewidth=2.0)
plt.xlabel("w", fontsize=16)
plt.ylabel("RSS", fontsize=16)
plt.show()
# plo_t() #调用绘制函数
"""算法3.1 梯度下降算法"""
w = 0
lr = 0.5
pred = x * w
loss = ((pred - y) ** 2) / 2
grad = (pred - y) * x
print("自变量的值:" + str(x))
print("真实因变量:" + str(y))
print("初始权重:" + str(w))
print("初始预测值:" + str(pred))
print("初始误差:" + str(loss))
print("初始梯度:" + str(grad))
"""
定义迭代函数
更新迭代后的预测值,预测误差,梯度
"""
def ite_re(w_, lr_, grad_, count):
count = int(count)
for i in range(1, count):
w_ = w_ - lr_ * grad_
pred_ = x * w_
loss_ = ((pred_ - y) ** 2) / 2
grad_ = (pred_ - y) * x
print(f"第{i}次更新后的权重:" + str(w_))
print(f"第{i}次更新后的预测值:" + str(pred_))
print(f"第{i}次更新后的误差:" + str(loss_))
print(f"第{i}次更新后的梯度:" + str(grad_))
print("\n\n")
# ite_re(w, lr, grad, 20) # 迭代更新20次
"""算法3.2 使用随机梯度下降法迭代更新w"""
def a():
"""
对自变量矩阵x,因变量向量y
对数据进行标准化和中心化得到scaled_x和centered_y
"""
x_ = advertising.iloc[:, 0:2].values
y_ = advertising.iloc[:, 3].values
scaled_x = (x_ - np.mean(x_, axis=0, keepdims=True)) / np.std(x_, axis=0, keepdims=True)
centered_y = y_ - np.mean(y_)
lr_ = 0.1
w_ = np.zeros(2)
w_record = [w_.copy()]
for item in range(5):
total_loss = 0
for i in range(len(scaled_x)):
pred_ = np.sum(scaled_x[i] * w_)
total_loss += ((pred_ - centered_y[i]) ** 2) / 2
delta = (pred_ - centered_y[i])
w_ -= lr_ * (delta * scaled_x[i])
w_record.append(w_.copy())
c = total_loss / (i + 1)
print(c)
print(w_)
# Run the SGD demo (algorithm 3.2) on the advertising data.
a()
# 3.1.2 随机梯度下降法 (stochastic gradient descent)
# 于 2023-04-23 23:10:46 首次发布 (first published 2023-04-23 23:10:46)