import torch
import torch.nn as nn
import torch.optim as optim

# training data: three samples with two features each, and their targets
x = torch.tensor([[1., 2.], [3., 4.], [4., 2.]])
y = torch.tensor([[3.], [7.], [6.]])
# model construction and training
def model():
    # a small MLP: 2 inputs -> 4 -> 3 -> 1 output
    net = nn.Sequential(
        nn.Linear(2, 4),
        nn.ReLU6(),
        nn.Linear(4, 3),
        nn.ReLU(),
        nn.Linear(3, 1)
    )
    optimizer = optim.Adam(net.parameters(), lr=0.01)
    loss_fun = nn.MSELoss()
    for i in range(2000):
        out = net(x)
        loss = loss_fun(out, y)
        if i % 200 == 0:          # log the loss every 200 steps
            print(loss.item())
        optimizer.zero_grad()     # clear gradients from the previous step
        loss.backward()           # backpropagate
        optimizer.step()          # update the parameters
    print(net(x))                 # predictions on the training inputs
    # save the whole model; '.pt' is the conventional extension for a
    # pickled nn.Module (saving net.state_dict() is generally preferred)
    torch.save(net, 'net.pt')
model()
A simple DNN with only three layers: input, hidden, and output.
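To use the trained network later, the saved file can be loaded back for inference. Below is a minimal sketch, assuming the 'net.pt' file written above is present; note that on recent PyTorch releases (2.6+), torch.load defaults to weights_only=True, which refuses to unpickle a full nn.Module, so weights_only=False is passed explicitly (the keyword only exists on reasonably recent versions). Saving net.state_dict() and calling load_state_dict on a freshly built model is the generally recommended alternative.

# reload the pickled model; weights_only=False is required on newer
# PyTorch versions because the file holds a full nn.Module, not just tensors
net = torch.load('net.pt', weights_only=False)
net.eval()                                  # switch to inference mode
with torch.no_grad():                       # no gradient tracking needed
    print(net(torch.tensor([[2., 3.]])))   # predict for one new sample

# preferred alternative: persist only the parameters
# torch.save(net.state_dict(), 'net_state.pt')
# net.load_state_dict(torch.load('net_state.pt'))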