Implementation code
"""resnet18的pytorch实现"""
import torch
import torch.nn as nn
import torch.nn.functional as F
class basic_block(nn.Module):
    """Basic residual block: two 3x3 convolutions plus a skip connection."""
    def __init__(self, in_planes, planes, kernel_size=3, stride=1):
        """
        :param in_planes: number of input channels
        :param planes: number of output channels
        :param kernel_size: convolution kernel size
        :param stride: stride of the first convolution (downsamples when > 1)
        """
        super(basic_block, self).__init__()
        # bias=False throughout: each conv is followed by BatchNorm2d, which
        # re-centers activations, so a conv bias would be redundant.
        self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=kernel_size,
                               stride=stride, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        self.relu = nn.ReLU()
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=kernel_size,
                               stride=1, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(planes)
        # Project the identity branch when the spatial size or channel count
        # changes so the residual addition is shape-compatible.
        if stride != 1 or in_planes != planes:
            # Fix: bias=False was missing here — the bias is redundant before
            # BatchNorm2d (matches torchvision's ResNet downsample branch).
            self.downsample = nn.Sequential(
                nn.Conv2d(in_planes, planes, kernel_size=1,
                          stride=stride, bias=False),
                nn.BatchNorm2d(planes),
            )
        else:
            self.downsample = nn.Sequential()

    def forward(self, inx):
        x = self.relu(self.bn1(self.conv1(inx)))
        x = self.bn2(self.conv2(x))
        # Residual addition followed by the final activation.
        out = x + self.downsample(inx)
        return F.relu(out)
class Resnet(nn.Module):
    """ResNet backbone assembled from a user-supplied residual-block class."""
    def __init__(self, basicBlock, blockNums, nb_classes):
        """
        :param basicBlock: residual block class, instantiated as
            basicBlock(in_planes, planes, kernel_size, stride=...)
        :param blockNums: blocks per stage, e.g. [2, 2, 2, 2] for ResNet-18
        :param nb_classes: number of output classes of the final linear layer
        """
        super(Resnet, self).__init__()
        self.in_planes = 64
        # Stem: 7x7 stride-2 conv + 3x3 stride-2 max-pool (4x downsampling).
        self.conv1 = nn.Conv2d(3, self.in_planes, kernel_size=(7, 7),
                               stride=(2, 2), padding=3, bias=False)
        self.bn1 = nn.BatchNorm2d(self.in_planes)
        self.relu = nn.ReLU(inplace=True)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        # Four residual stages; stages 2-4 halve the spatial resolution.
        self.layer1 = self._make_layers(basicBlock, blockNums[0], 64, 1)
        self.layer2 = self._make_layers(basicBlock, blockNums[1], 128, 2)
        self.layer3 = self._make_layers(basicBlock, blockNums[2], 256, 2)
        self.layer4 = self._make_layers(basicBlock, blockNums[3], 512, 2)
        # Global average pool to 1x1 so any input resolution yields 512 features.
        self.avgpool = nn.AdaptiveAvgPool2d(output_size=(1, 1))
        self.fc = nn.Linear(512, nb_classes)

    def _make_layers(self, basicBlock, blockNum, plane, stride):
        """
        Build one stage of ``blockNum`` residual blocks.

        Only the first block may change stride/channels; the rest map
        plane -> plane at stride 1.
        :param basicBlock: residual block class
        :param blockNum: number of blocks in this stage (2 for ResNet-18)
        :param plane: output channel count of the stage
        :param stride: stride of the stage's first block
        :return: the stage as an nn.Sequential
        """
        stage_strides = [stride] + [1] * (blockNum - 1)
        blocks = []
        for s in stage_strides:
            blocks.append(basicBlock(self.in_planes, plane, 3, stride=s))
            self.in_planes = plane  # subsequent blocks see the new width
        return nn.Sequential(*blocks)

    def forward(self, inx):
        out = self.maxpool(self.relu(self.bn1(self.conv1(inx))))
        for stage in (self.layer1, self.layer2, self.layer3, self.layer4):
            out = stage(out)
        out = self.avgpool(out)
        out = torch.flatten(out, 1)  # (N, 512, 1, 1) -> (N, 512)
        return self.fc(out)
if __name__=="__main__":
resnet18=Resnet(basic_block,[2,2,2,2],1000)
inx=torch.randn(32,3,224,224)
print(inx.shape)
outx=resnet18(inx)
print(outx.shape)
Run output
C:\Users\admin\Anaconda3\python.exe D:/BangW/网络流量/Var-CNN-master/myCode/resnet18.py
Resnet(
(conv1): Conv2d(3, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False)
(bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(inplace=True)
(maxpool): MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False)
(layer1): Sequential(
(0): basic_block(
(conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU()
(conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(downsample): Sequential()
)
(1): basic_block(
(conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU()
(conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(downsample): Sequential()
)
)
(layer2): Sequential(
(0): basic_block(
(conv1): Conv2d(64, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
(bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU()
(conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(downsample): Sequential(
(0): Conv2d(64, 128, kernel_size=(1, 1), stride=(2, 2))
(1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
)
)
(1): basic_block(
(conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU()
(conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(downsample): Sequential()
)
)
(layer3): Sequential(
(0): basic_block(
(conv1): Conv2d(128, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
(bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU()
(conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(downsample): Sequential(
(0): Conv2d(128, 256, kernel_size=(1, 1), stride=(2, 2))
(1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
)
)
(1): basic_block(
(conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU()
(conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(downsample): Sequential()
)
)
(layer4): Sequential(
(0): basic_block(
(conv1): Conv2d(256, 512, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
(bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU()
(conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(downsample): Sequential(
(0): Conv2d(256, 512, kernel_size=(1, 1), stride=(2, 2))
(1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
)
)
(1): basic_block(
(conv1): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU()
(conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(downsample): Sequential()
)
)
(avgpool): AdaptiveAvgPool2d(output_size=(1, 1))
(fc): Linear(in_features=512, out_features=1000, bias=True)
)
input data shape: (32, 3, 224, 224)
output data shape: (32, 1000)
Process finished with exit code 0