问题描述
错误代码:
class Net(nn.Module):
    """CNN classifier: 4 conv+pool stages followed by 5 fully-connected layers.

    Expects input of shape (N, 3, H, W) whose spatial size shrinks to exactly
    4x4 after the four conv/pool stages (each conv with kernel 5, stride 1,
    padding 1 shrinks H/W by 2; each max-pool halves it) — e.g. H = W = 94.
    Outputs per-sample log-probabilities over 11 classes.
    """

    def __init__(self):
        super(Net, self).__init__()
        # nn.Conv2d(in_channels, out_channels, kernel_size, stride, padding)
        self.conv1 = nn.Conv2d(3, 64, 5, 1, 1)    # 64 filters of 5x5
        self.conv2 = nn.Conv2d(64, 128, 5, 1, 1)
        self.conv3 = nn.Conv2d(128, 256, 5, 1, 1)
        self.conv4 = nn.Conv2d(256, 256, 5, 1, 1)
        self.conv4_drop = nn.Dropout2d()
        # Fully-connected head; 4*4*256 = 4096 flattened features.
        # NOTE: fc1's in_features must equal the second argument of x.view()
        # in forward().
        self.fc1 = nn.Linear(4 * 4 * 256, 3072)
        self.fc2 = nn.Linear(3072, 2048)
        self.fc3 = nn.Linear(2048, 1024)
        self.fc4 = nn.Linear(1024, 256)
        self.fc5 = nn.Linear(256, 11)             # 11-class output head

    def forward(self, x):
        # Conv stage 1
        x = F.relu(self.conv1(x))
        x = F.max_pool2d(x, 2)
        # Conv stage 2
        x = F.relu(self.conv2(x))
        x = F.max_pool2d(x, 2)
        # Conv stage 3
        x = F.relu(self.conv3(x))
        x = F.max_pool2d(x, 2)
        # Conv stage 4: BUGFIX — apply the dropout that was defined but unused.
        x = self.conv4_drop(self.conv4(x))
        x = F.relu(x)
        x = F.max_pool2d(x, 2)            # spatial size now 4x4
        # Flatten for the fully-connected head; second arg must match fc1.
        x = x.view(-1, 4 * 4 * 256)
        x = F.relu(self.fc1(x))
        x = F.relu(self.fc2(x))
        x = F.relu(self.fc3(x))
        x = F.relu(self.fc4(x))
        # BUGFIX: fc5 was defined but never called, so the network returned a
        # 256-dim vector instead of 11 class scores.
        x = self.fc5(x)
        # BUGFIX: F.LogSoftmax does not exist (that is the nn.LogSoftmax
        # module); the functional form is F.log_softmax, and dim must be given.
        return F.log_softmax(x, dim=1)
错误原因:
x.view 展平后的特征数与 nn.Linear 的输入特征数不一致。
改为:
x = x.view(-1, 4*4*256)
self.fc1 = nn.Linear(4*4*256, 3072)
即 x.view 的第二个参数必须和 nn.Linear 的第一个参数(in_features)一致。