先贴一下代码
import torch as t
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
class Net(nn.Module):
    """LeNet-5-style CNN: 1-channel 32x32 input -> 10 class logits."""

    def __init__(self):
        super().__init__()
        # 1 input channel -> 6 feature maps, 5x5 kernel
        self.conv1 = nn.Conv2d(1, 6, 5)
        # 6 -> 16 feature maps, 5x5 kernel
        self.conv2 = nn.Conv2d(6, 16, 5)
        # Two conv+pool stages reduce a 32x32 input to 16 maps of 5x5,
        # hence the flattened size 16*5*5 feeding the classifier head.
        self.fc1 = nn.Linear(16 * 5 * 5, 120)
        self.fc2 = nn.Linear(120, 84)
        self.fc3 = nn.Linear(84, 10)

    def forward(self, x):
        """Forward pass.

        Args:
            x: tensor of shape (batch, 1, 32, 32).
        Returns:
            Logits of shape (batch, 10).
        """
        # conv -> relu -> 2x2 max-pool, twice
        x = F.max_pool2d(F.relu(self.conv1(x)), 2)
        x = F.max_pool2d(F.relu(self.conv2(x)), 2)
        # BUG FIX: the original used x.view(1, -1), which hard-codes a
        # batch size of 1 and breaks (shape mismatch at fc1) for any
        # larger batch. Flatten per sample instead.
        x = x.view(x.size(0), -1)
        x = F.relu(self.fc1(x))
        x = F.relu(self.fc2(x))
        return self.fc3(x)
# Build the network and run one forward/backward pass as a smoke test.
net = Net()
print(net)

# A plain tensor suffices: torch.autograd.Variable was merged into Tensor
# in PyTorch 0.4, so wrapping inputs in Variable is obsolete.
input = t.randn(1, 1, 32, 32)  # NOTE(review): shadows builtin `input`; name kept for compatibility
out = net(input)
print(out)

# Clear any stale gradients before backpropagating.
net.zero_grad()

# `out` is non-scalar (shape (1, 10)), so backward() requires an explicit
# gradient tensor of the same shape. retain_graph=True keeps the graph
# alive so backward() could be called a second time.
out.backward(t.ones(1, 10), retain_graph=True)
首先说一下我遇到的问题吧,在最后 out.backward 处出现错误