# Implement gradient descent and backpropagation in Python (PyTorch autograd).
import torch
import numpy as np
# Tiny training set: three random integer inputs in [1, 5] and targets
# generated by the ground-truth quadratic y = 3*x^2 + 2*x + 2.
x_data = np.random.randint(1, 6, size=(3))
y_data = 3.0 * (x_data * x_data) + 2 * x_data + 2.0
print("x_data:", x_data, "y_data:", y_data)
# Learnable parameters of the quadratic model, all initialised to 1.0;
# requires_grad=True makes autograd track operations on them.
w1 = torch.ones(1, requires_grad=True)
w2 = torch.ones(1, requires_grad=True)
bias = torch.ones(1, requires_grad=True)
def forward(x):
    """Predict y for input x with the quadratic model w1*x^2 + w2*x + bias.

    Uses the module-level parameter tensors w1, w2 and bias.
    """
    quadratic_term = w1 * (x * x)
    linear_term = w2 * x
    return quadratic_term + linear_term + bias
def loss(x, y):
    """Return the mean squared error between the model's prediction for x and y."""
    prediction = forward(x)
    squared_error = (prediction - y) ** 2
    return squared_error.mean()
# Plain stochastic gradient descent: one parameter update per training sample.
# Fix: the original mutated parameters and zeroed gradients through the
# deprecated `.data` attribute, which bypasses autograd's versioning checks.
# The idiomatic, numerically identical form is in-place ops under torch.no_grad().
learning_rate = 0.001
for epoch in range(100):
    for x, y in zip(x_data, y_data):
        sample_loss = loss(x, y)   # scalar tensor; builds the autograd graph
        sample_loss.backward()     # accumulates d(loss)/d(param) into .grad
        print("grad: ", x, y, w1.grad.item(), w2.grad.item(), bias.grad.item())
        with torch.no_grad():
            w1 -= learning_rate * w1.grad
            w2 -= learning_rate * w2.grad
            bias -= learning_rate * bias.grad
            # Zero accumulated gradients so the next sample starts fresh.
            w1.grad.zero_()
            w2.grad.zero_()
            bias.grad.zero_()
    # sample_loss here is the loss of the LAST sample of the epoch, not an epoch mean.
    print("epoch: {}\tl: {}\tw1: {}\tw2: {}\tbias: {}\t".format(epoch, sample_loss.item(), w1.item(), w2.item(), bias.item()))