用一个一元线性回归模型带大家入门,内容包括:如何在 PyTorch 中生成带随机性的数据、如何搭建一元线性回归模型、模型的训练过程,以及训练好的模型或参数如何保存和调用等。
测试源码test.py:
import torch as t
import torch.nn as nn
import random
import matplotlib.pyplot as plt
from torch.autograd import Variable
# Build a synthetic dataset with a known linear trend plus noise:
# 100 evenly spaced inputs in [-1, 1], shaped (100, 1) to match nn.Linear.
x_train = t.linspace(-1, 1, 100).view(100, 1)
# Target: y = 5x + uniform noise in [0, 1) + 8.
# NOTE: the original wrote int(random.sample(range(8, 9), 1)[0]), but
# range(8, 9) contains only 8, so that expression always evaluates to 8 —
# replaced with the constant (identical behavior, no fake randomness).
y_train = 5 * x_train + t.rand(x_train.size()) + 8
#定义一个神经网络模型y=wx+b
# Define the network: a single linear layer computing y = w*x + b.
class LinearRegression(nn.Module):
    """One-layer linear regression model y = w*x + b.

    Args:
        input_size: number of input features per sample.
        output_size: number of output features per sample.
    """

    def __init__(self, input_size, output_size):
        super(LinearRegression, self).__init__()
        self.linear = nn.Linear(input_size, output_size)

    def forward(self, x):
        # x: (batch, input_size) -> (batch, output_size)
        return self.linear(x)
input_size = 1
output_size = 1

# Two ways to restore a trained model:
#  1) t.load('./model.pkl') unpickles the whole module object — no need to
#     instantiate the network first, but the class definition must still be
#     importable when loading.
#  2) load_state_dict() restores only the parameters (w, b) into a freshly
#     constructed network — the recommended, lighter-weight approach.
# Either way, the LinearRegression class must be defined before loading,
# otherwise unpickling fails.
# model = t.load('./model.pkl')
model = LinearRegression(input_size, output_size)
model.load_state_dict(t.load('./model_parameter.pkl'))

# Plot predictions against the ground truth.
model.eval()  # inference mode (a no-op for a bare Linear layer, but good practice)
# The Variable wrapper is obsolete since PyTorch 0.4 — tensors work directly.
predicted = model(x_train).data.numpy()
print(predicted)
plt.plot(x_train, y_train, 'bo', label='real')
plt.plot(x_train, predicted, 'r*', label='predict')  # omit 'r*' to draw a line instead of markers
plt.legend()
plt.show()
训练源码train.py:
import torch as t
import torch.nn as nn
import random
import matplotlib.pyplot as plt
from torch.autograd import Variable
# Hyperparameters.
input_size = 1
output_size = 1
num_epochs = 5000
learning_rate = 0.05

# Synthetic training data: 100 points on y = 5x + noise + 8, shaped (100, 1).
x_train = t.linspace(-1, 1, 100).view(100, 1)
# NOTE: the original int(random.sample(range(8, 9), 1)[0]) can only ever
# return 8 (single-element range), so the constant is used directly —
# identical behavior, clearer intent.
y_train = 5 * x_train + t.rand(x_train.size()) + 8
class LinearRegression(nn.Module):
    """Single linear layer implementing y = w*x + b.

    Args:
        input_size: number of input features per sample.
        output_size: number of output features per sample.
    """

    def __init__(self, input_size, output_size):
        super(LinearRegression, self).__init__()
        self.linear = nn.Linear(input_size, output_size)

    def forward(self, x):
        # x: (batch, input_size) -> (batch, output_size)
        return self.linear(x)
model = LinearRegression(input_size, output_size)

# Mean squared error loss and plain stochastic gradient descent.
criterion = nn.MSELoss()
optimizer = t.optim.SGD(model.parameters(), lr=learning_rate)

# Full-batch gradient descent: the entire training set every epoch.
# The Variable wrapper is obsolete since PyTorch 0.4 — tensors work directly.
for epoch in range(num_epochs):
    outputs = model(x_train)
    loss = criterion(outputs, y_train)
    optimizer.zero_grad()  # clear gradients accumulated from the previous step
    loss.backward()
    optimizer.step()
    if (epoch + 1) % 50 == 0:
        print('Epoch [%d/%d], Loss: %.4f'
              % (epoch + 1, num_epochs, loss.item()))

# Persist both the whole module object and (preferably) just its parameters.
t.save(model, './model.pkl')
t.save(model.state_dict(), 'model_parameter.pkl')

# Visualize the fitted line against the training data.
model.eval()
predicted = model(x_train).data.numpy()
plt.plot(x_train, y_train, 'bo', label='Original Data')
plt.plot(x_train, predicted, label='Fitting Line')  # label typo 'Fiting' fixed
plt.legend()
plt.show()
训练结果:
Epoch [5000/5000], Loss: 0.0812
测试: