Network building blocks: convolution, activation, pooling, batch normalization (BN), dropout, fully connected layers, and so on, each with its own role.
Some layers require their parameters to be set explicitly, such as in_channels, out_channels, kernel_size, padding, and stride.
Given an input of size W*W, a kernel of size F×F, stride S, and padding P,
the output feature-map size of a convolution layer is N = (W − F + 2P)/S + 1, rounded down when the division is not exact.
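As a quick sanity check, the formula can be compared against an actual nn.Conv2d layer (the channel counts and sizes below are arbitrary, chosen only for illustration):

import torch
import torch.nn as nn

def conv_out(w, f, s, p):
    # floor((W - F + 2P) / S) + 1
    return (w - f + 2 * p) // s + 1

conv = nn.Conv2d(in_channels=3, out_channels=8, kernel_size=3, stride=2, padding=1)
x = torch.randn(1, 3, 224, 224)
print(conv(x).shape)           # torch.Size([1, 8, 112, 112])
print(conv_out(224, 3, 2, 1))  # 112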
In nn.MaxPool2d(), the ceil_mode parameter defaults to False, meaning the output size is rounded down; if set to True, it is rounded up.
The pooling layer output formula (from the PyTorch docs) is H_out = floor((H_in + 2*padding − dilation*(kernel_size − 1) − 1) / stride) + 1, where dilation defaults to 1 and, if stride is not set, it defaults to kernel_size.
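On an input with odd spatial size the two rounding modes give different outputs, which makes the effect of ceil_mode easy to see:

import torch
import torch.nn as nn

x = torch.randn(1, 1, 7, 7)  # odd spatial size, so floor vs. ceil differ

floor_pool = nn.MaxPool2d(kernel_size=2)                 # stride defaults to kernel_size
ceil_pool = nn.MaxPool2d(kernel_size=2, ceil_mode=True)

print(floor_pool(x).shape)  # torch.Size([1, 1, 3, 3]): floor((7 - 2)/2) + 1 = 3
print(ceil_pool(x).shape)   # torch.Size([1, 1, 4, 4]): ceil((7 - 2)/2) + 1 = 4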
Example:
Suppose the input is 3*448*448 and, after 5 convolution stages, we want a 256*14*14 feature map.
The simplest scheme: every convolution layer uses kernel 3, stride 1, padding 1 (spatial size unchanged),
and every pooling layer uses kernel 2, stride 2 (spatial size halved). Five halvings give 448 / 2^5 = 14, as the trace below shows.
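A plain-Python sketch applying the two formulas above stage by stage confirms the plan before writing any model code:

w = 448
for i in range(5):
    w = (w - 3 + 2 * 1) // 1 + 1  # 3x3 conv, stride 1, padding 1: size unchanged
    w = (w - 2) // 2 + 1          # 2x2 max pool, stride 2: size halved
    print(f"after stage {i + 1}: {w}x{w}")
# prints 224, 112, 56, 28, and finally 14x14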
import torch
import torch.nn as nn
import torch.nn.functional as F

class myNet(nn.Module):
    def __init__(self):
        super(myNet, self).__init__()
        # 3x3 convs with stride 1 and padding 1 keep the spatial size unchanged
        self.conv1 = nn.Conv2d(3, 32, 3, 1, 1)
        self.conv2 = nn.Conv2d(32, 64, 3, 1, 1)
        self.conv3 = nn.Conv2d(64, 128, 3, 1, 1)
        self.conv4 = nn.Conv2d(128, 128, 3, 1, 1)
        self.conv5 = nn.Conv2d(128, 256, 3, 1, 1)
        self.linear1 = nn.Linear(256 * 14 * 14, 4096)
        self.linear2 = nn.Linear(4096, 4096)
        self.linear3 = nn.Linear(4096, 10)

    def forward(self, x):
        # each 2x2 max pool with stride 2 halves the spatial size: 448 -> 224 -> ... -> 14
        x = F.max_pool2d(F.relu(self.conv1(x)), (2, 2))
        x = F.max_pool2d(F.relu(self.conv2(x)), (2, 2))
        x = F.max_pool2d(F.relu(self.conv3(x)), (2, 2))
        x = F.max_pool2d(F.relu(self.conv4(x)), (2, 2))
        x = F.max_pool2d(F.relu(self.conv5(x)), (2, 2))
        print(x.size())              # torch.Size([1, 256, 14, 14])
        x = x.view(x.size()[0], -1)  # flatten to (batch, 256*14*14)
        x = self.linear1(x)
        print(x.shape)
        x = self.linear2(x)
        print(x.shape)
        x = self.linear3(x)
        return x

inputs = torch.randn(1, 3, 448, 448)
model = myNet()
# print the structure of each layer
print(model)
print(model(inputs).shape)
"""
Output:
myNet(
(conv1): Conv2d(3, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
(conv2): Conv2d(32, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
(conv3): Conv2d(64, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
(conv4): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
(conv5): Conv2d(128, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
(linear1): Linear(in_features=50176, out_features=4096, bias=True)
(linear2): Linear(in_features=4096, out_features=4096, bias=True)
(linear3): Linear(in_features=4096, out_features=10, bias=True)
)
torch.Size([1, 256, 14, 14])
torch.Size([1, 4096])
torch.Size([1, 4096])
torch.Size([1, 10])
"""
To be continued...