# UP:B站-刘二大人
# 原视频链接:04.反向传播_哔哩哔哩_bilibili
import torch
import numpy as np
import matplotlib.pyplot as plt
''' y = w1 * x^2 + w2 * x + b '''
# Training samples: three points on the line y = 2x. (The model itself is
# quadratic, so it can only approximate this linear relationship.)
x_data = [1.0, 2.0, 3.0]
y_data = [2.0, 4.0, 6.0]

# Learnable parameters, each initialized to 1.0. requires_grad=True makes
# autograd track operations on them so .backward() can populate .grad.
w1 = torch.tensor([1.0], requires_grad=True)
w2 = torch.tensor([1.0], requires_grad=True)
b = torch.tensor([1.0], requires_grad=True)
def forward(x):
    """Model prediction y_hat = w1 * x^2 + w2 * x + b.

    Reads the module-level parameters w1, w2, b; returns a tensor so the
    result stays in the autograd graph.
    """
    quadratic_term = w1 * x ** 2
    linear_term = w2 * x
    return quadratic_term + linear_term + b
def loss(x, y):
    """Squared error between the model's prediction for x and the target y."""
    residual = forward(x) - y
    return residual ** 2
# Prediction before training: with w1 = w2 = b = 1, forward(4) = 16 + 4 + 1 = 21.
print("predict (before training):", 4, forward(4).item())

epoch_list = []  # x-axis of the loss curve
loss_list = []   # loss of the LAST sample seen in each epoch (not an average)

learning_rate = 0.01  # single place to tune the SGD step size

for epoch in range(100):
    # Stochastic gradient descent: one parameter update per training sample.
    for x, y in zip(x_data, y_data):
        l = loss(x, y)   # forward pass builds the computation graph
        l.backward()     # backward pass accumulates gradients into .grad
        # Print scalars via .item() — printing the tensor `l` itself would be
        # inconsistent with the note below (a tensor drags its graph along).
        print('\tgrad:', x, y, l.item(), w1.grad.item(), w2.grad.item(), b.grad.item())
        # Update parameters outside the autograd graph. torch.no_grad() is the
        # supported replacement for the deprecated `.data` manipulation, and
        # grad.zero_() clears the accumulated gradients before the next step.
        with torch.no_grad():
            w1 -= learning_rate * w1.grad
            w2 -= learning_rate * w2.grad
            b -= learning_rate * b.grad
            w1.grad.zero_()
            w2.grad.zero_()
            b.grad.zero_()
    loss_list.append(l.item())
    epoch_list.append(epoch)
    # Extract the loss with l.item(); using `l` directly keeps the tensor
    # (and its computation graph) alive.
    print('epoch:', epoch, l.item())

print("predict (after training):", 4, forward(4).item())

# Plot the recorded loss curve over epochs.
plt.plot(epoch_list, loss_list)
plt.xlabel("epoch")
plt.ylabel("loss")
plt.show()