梯度下降算法代码如下:(增加绘图代码)
import matplotlib.pyplot as plt
# Training data: y = 2x, so the optimal weight is w = 2.0.
x_data = [1.0, 2.0, 3.0]
y_data = [2.0, 4.0, 6.0]
w = 1.0 # initial weight guess; updated in place by gradient descent below
def forward(x):
    """Linear model: return the prediction y_hat = x * w.

    Uses the module-level weight ``w``. Fixed: the body was not
    indented under the ``def``, which is an IndentationError.
    """
    return x * w
def cost(xs, ys):
    """Return the mean squared error of the model over (xs, ys).

    Fixes: body re-indented (was at column 0, an IndentationError),
    and the accumulator renamed from ``cost`` — it shadowed the
    function's own name inside its body.
    """
    total = 0.0
    for x, y in zip(xs, ys):
        y_pred = forward(x)           # model prediction for this sample
        total += (y_pred - y) ** 2    # squared error for this sample
    return total / len(xs)            # average over all samples
def gradient(xs, ys):
    """Return d(cost)/dw for the MSE cost: mean of 2*x*(x*w - y).

    Reads the module-level weight ``w``. Fixed: the body was not
    indented under the ``def`` (IndentationError).
    """
    grad = 0.0
    for x, y in zip(xs, ys):
        grad += 2 * x * (x * w - y)  # derivative of (x*w - y)**2 w.r.t. w
    return grad / len(xs)            # average gradient over the dataset
# Training history, kept for plotting the cost curve.
epoch_list = []
cost_list = []

print('Predict (before training)', 4, forward(4))
for epoch in range(100):
    cost_val = cost(x_data, y_data)
    grad_val = gradient(x_data, y_data)
    w -= 0.01 * grad_val  # gradient-descent update; 0.01 is the learning rate
    print('Epoch:', epoch, 'w=', w, 'cost=', cost_val)
    epoch_list.append(epoch)
    # FIX: cost_list was declared but never filled, so the plot data
    # stayed empty; record the cost alongside its epoch.
    cost_list.append(cost_val)