Gradient Descent
# Gradient descent
# 1. Prepare the dataset
x_data = [1.0, 2.0, 3.0]
y_data = [2.0, 4.0, 6.0]
# Initialize the weight with an initial guess
w = 1.0
# 2. Define the model: a linear function y = x * w
def forward(x):
    return x * w
# 3. Define the loss function: mean squared error over the dataset
def loss(xs, ys):
    cost = 0
    for x, y in zip(xs, ys):
        y_pred = forward(x)
        cost += (y_pred - y) ** 2
    return cost / len(xs)
# Compute the gradient of the cost with respect to w:
# d/dw (1/N) * sum((x*w - y)^2) = (1/N) * sum(2*x*(x*w - y))
def gradient(xs, ys):
    grad = 0
    for x, y in zip(xs, ys):
        grad += 2 * x * (x * w - y)
    return grad / len(xs)
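
# Sanity check (added sketch, not part of the original script): estimate
# d(cost)/dw numerically with central differences and compare it to the
# analytic gradient above. It reads the global w, so it saves and restores it.
def numerical_gradient(xs, ys, eps=1e-6):
    global w
    w_saved = w
    w = w_saved + eps
    loss_plus = loss(xs, ys)
    w = w_saved - eps
    loss_minus = loss(xs, ys)
    w = w_saved  # restore the original weight
    return (loss_plus - loss_minus) / (2 * eps)

# At w = 1.0 both values are about -9.33 for this dataset.
print('analytic gradient:', gradient(x_data, y_data))
print('numerical gradient:', numerical_gradient(x_data, y_data))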
print('Predict (Before training)', 4, forward(4))
# 4. Training loop: 100 epochs of full-batch gradient descent
for epoch in range(100):
    # Forward pass: compute the current cost
    cost_val = loss(x_data, y_data)
    # Backward pass: compute the gradient
    grad_val = gradient(x_data, y_data)
    # Update the parameter (learning rate = 0.01)
    w -= 0.01 * grad_val
    print('Epoch:', epoch, 'w =', w, 'loss =', cost_val)
print('Predict (After training)', 4, forward(4))
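
# Variant (added sketch, not part of the original script): stochastic
# gradient descent updates the weight after every sample instead of once
# per epoch, using the per-sample gradient d/dw (x*w - y)^2 = 2*x*(x*w - y).
# Same data and learning rate as above; w_sgd is a name introduced here.
w_sgd = 1.0
for epoch in range(100):
    for x, y in zip(x_data, y_data):
        grad = 2 * x * (x * w_sgd - y)  # single-sample gradient
        w_sgd -= 0.01 * grad
print('SGD estimate of w:', w_sgd)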