# PyTorch polynomial regression
import torch
import numpy as np
from icecream import ic
from torch.autograd import Variable
import matplotlib.pyplot as plt
# Ground-truth cubic polynomial: y = 0.9 + 0.5*x + 3*x^2 + 2.4*x^3
w_target = np.array([0.5, 3, 2.4])   # coefficients for x, x^2, x^3
b_target = np.array([0.9])           # bias term
x_sample = np.arange(-3, 3.1, 0.1)   # 61 sample points in [-3, 3]
y_sample = b_target[0] + w_target[0] * x_sample + w_target[1] * x_sample ** 2 + w_target[2] * x_sample ** 3
# Design matrix with columns [x, x^2, x^3] -> shape (61, 3), so the cubic
# fit becomes a plain linear regression on the polynomial features.
x_train = np.stack([x_sample ** i for i in range(1, 4)], axis=1)
x_train = Variable(torch.Tensor(x_train).float())
y_train = Variable(torch.from_numpy(y_sample).float().unsqueeze(1))
# Learnable parameters.
# BUG FIX: the original used torch.Tensor(1) for b, which allocates
# UNINITIALIZED memory (arbitrary garbage) — start the bias at zero so
# training begins from a well-defined state.
w = Variable(torch.randn(3, 1), requires_grad=True)
b = Variable(torch.zeros(1), requires_grad=True)
def y_predict(x=None):
    """Evaluate the cubic model.

    Args:
        x: optional design matrix of polynomial features, shape (N, 3).
           Defaults to the module-level training matrix ``x_train`` so
           existing no-argument callers are unaffected.

    Returns:
        Tensor of predictions ``x @ w + b`` with shape (N, 1).
    """
    if x is None:
        x = x_train
    return torch.matmul(x, w) + b
def loss_(y_, y):
    """Mean-squared-error loss between predictions ``y_`` and targets ``y``."""
    return torch.mean((y_ - y) ** 2)
lr = 0.001  # learning rate for plain full-batch gradient descent

# Train for 40 epochs of manual gradient descent.
for i in range(40):
    y_ = y_predict()
    loss = loss_(y_, y_train)
    loss.backward()
    # Manual SGD step; updating .data keeps the update outside autograd.
    w.data = w.data - lr * w.grad.data
    b.data = b.data - lr * b.grad.data
    # Zero the gradients — backward() accumulates into .grad otherwise.
    w.grad.data.zero_()
    b.grad.data.zero_()
    if (i + 1) % 2 == 0:
        # BUG FIX: report the 1-based epoch the modulus test refers to
        # (original printed i while gating on i+1), and print the scalar
        # loss via .item() instead of a tensor repr.
        print(f'epoch:{i + 1},loss:{loss.item()}')
# Visualize the ground-truth curve against the fitted polynomial.
plt.plot(x_sample, y_sample, label='real curve', color='b')
# y_predict() evaluates the trained model on the training inputs;
# .data.numpy() unwraps the Variable into a NumPy array for matplotlib.
plt.plot(x_sample, y_predict().data.numpy(), label='fitting curve', color='r')
plt.legend()
plt.show()