import torch
import numpy as np
import matplotlib.pyplot as plt
torch.manual_seed(2017)
lr = 1e-2  # learning rate
# Training data (15 samples, one feature each)
x_train = np.array(
    [[3.3], [4.4], [5.5], [6.71], [6.93], [4.168], [9.779], [6.182], [7.59], [2.167], [7.042], [10.791], [5.313],
     [7.997], [3.1]], dtype=np.float32)
y_train = np.array(
    [[1.7], [2.76], [2.09], [3.19], [1.694], [1.573], [3.366], [2.596], [2.53], [1.221], [2.827], [3.465], [1.65],
     [2.904], [1.3]], dtype=np.float32)
plt.plot(x_train, y_train, 'bo')
plt.show()
# Convert to tensors
x_train = torch.from_numpy(x_train)
y_train = torch.from_numpy(y_train)
# Initialize parameters w and b (requires_grad=True so autograd can compute their gradients)
w = torch.randn(1, requires_grad=True)
b = torch.zeros(1, requires_grad=True)
print(w)
print(b)
# Define the linear regression model
def linear_model(x):
    return x * w + b
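# Note: x_train has shape (15, 1) and w has shape (1,), so x * w broadcasts
# element-wise and the model returns one prediction per sample.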
# Define the loss function (mean squared error)
def get_loss(y_, y):
    return torch.mean((y_ - y) ** 2)
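# The loop below minimizes the MSE loss with plain gradient descent:
# after loss.backward() fills w.grad and b.grad with dL/dw and dL/db,
# each parameter is updated as w <- w - lr * dL/dw (and likewise for b).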
for e in range(10):  # run 10 updates
    y_ = linear_model(x_train)
    loss = get_loss(y_, y_train)
    # Compute gradients via autograd
    loss.backward()
    # Update the weights (inside no_grad so the updates are not tracked by autograd)
    with torch.no_grad():
        w -= lr * w.grad
        b -= lr * b.grad
    w.grad = None  # zero the gradient
    b.grad = None  # zero the gradient
    print('epoch: {}, loss: {}'.format(e, loss.item()))
# %%
y_ = linear_model(x_train)
plt.plot(x_train.data.numpy(), y_train.data.numpy(), 'bo', label='real')
plt.plot(x_train.data.numpy(), y_.data.numpy(), 'r-', label='estimated')
plt.legend()
plt.show()
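For comparison, here is a minimal sketch of the same fit using PyTorch's built-in nn.Linear and torch.optim.SGD (reusing the x_train and y_train tensors defined above); they implement the same x * w + b model, MSE loss, and gradient-descent step that the manual loop performs.

import torch.nn as nn

model = nn.Linear(1, 1)            # one input feature, one output: y = w * x + b
criterion = nn.MSELoss()           # same mean-squared-error loss as get_loss
optimizer = torch.optim.SGD(model.parameters(), lr=1e-2)

for e in range(10):
    y_ = model(x_train)
    loss = criterion(y_, y_train)
    optimizer.zero_grad()          # same role as setting w.grad / b.grad to None
    loss.backward()
    optimizer.step()               # w -= lr * w.grad, b -= lr * b.grad
    print('epoch: {}, loss: {}'.format(e, loss.item()))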