# Linear regression
import torch
from torch.autograd import Variable
import torch.nn as nn
class Model(nn.Module):
    """Linear regression model: y = w * x + b, implemented as one nn.Linear(1, 1) layer."""

    def __init__(self):
        super(Model, self).__init__()
        # Single learnable affine map from a 1-d input to a 1-d output.
        self.linear = nn.Linear(1, 1)

    def forward(self, x):
        """Return the model prediction for input tensor x of shape (N, 1)."""
        return self.linear(x)

    def run(self, x_data=None, y_data=None, epochs=500, lr=0.01, verbose=True):
        """Train THIS model with full-batch SGD and print a sample prediction.

        Bug fixed: the original built a fresh throwaway ``Model()`` inside
        ``run`` instead of training ``self``, so the caller's instance was
        never updated.

        Args:
            x_data: Input tensor of shape (N, 1). Defaults to [[1.], [2.], [3.]].
            y_data: Target tensor of shape (N, 1). Defaults to [[2.], [4.], [6.]]
                (i.e. y = 2x, so the fit should approach w=2, b=0).
            epochs: Number of full-batch gradient steps (default 500, as before).
            lr: SGD learning rate (default 0.01, as before).
            verbose: When True (default), print the per-epoch loss and the
                final sample prediction, matching the original output.

        Returns:
            The prediction tensor for the probe input [[4.0]], shape (1, 1).
        """
        # None-defaults instead of mutable tensor defaults; these match the
        # values the original script supplied via module globals.
        if x_data is None:
            x_data = torch.tensor([[1.0], [2.0], [3.0]])
        if y_data is None:
            y_data = torch.tensor([[2.0], [4.0], [6.0]])

        # Sum (not mean) reduction, as in the original: gradients scale with N.
        criterion = nn.MSELoss(reduction='sum')
        # Registers all learnable parameters of self.linear with the optimizer.
        optimizer = torch.optim.SGD(self.parameters(), lr=lr)

        for epoch in range(epochs):
            y_pred = self(x_data)
            loss = criterion(y_pred, y_data)
            if verbose:
                print(epoch, loss.item())
            # Standard step: clear stale grads, backprop, update weights.
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()

        # Probe the trained model at x=4 (expected ~8 for y = 2x data).
        hour_var = torch.tensor([[4.0]])
        prediction = self(hour_var)  # reuse one forward pass (original ran two)
        if verbose:
            print('predict (after training)', 4, prediction.data[0][0])
        return prediction
if __name__ == "__main__":
print("Life is short, You need Python!")
x_data = Variable(torch.Tensor([[1.0], [2.0], [3.0]]))
y_data = Variable(torch.Tensor([[2.0], [4.0], [6.0]]))
m = Model()
m.run()