# 自己实现resnet18网络结构 (a from-scratch implementation of the ResNet-18 architecture)


import torch
import torch.nn as nn

# 定义一个Residual模块
# A basic residual block (two 3x3 convs), as used in ResNet-18/34.
class Residual(nn.Module):
    """Residual block: ``out = ReLU(F(x) + shortcut(x))``.

    ``F(x)`` is conv3x3 -> BN -> ReLU -> conv3x3 -> BN.  When the block
    changes the channel count or the spatial size, the shortcut is a
    1x1 conv + BN that projects ``x`` to the matching shape; otherwise
    the shortcut is the identity.

    Args:
        in_channels:  number of input channels.
        out_channels: number of output channels.
        stride:       stride of the first conv (2 halves the spatial size).
    """
    def __init__(self, in_channels, out_channels, stride=1):
        super(Residual, self).__init__()
        self.stride = stride
        self.conv1 = nn.Conv2d(in_channels, out_channels, kernel_size=3, stride=stride, padding=1)
        self.bn1 = nn.BatchNorm2d(out_channels)
        self.relu = nn.ReLU(inplace=True)
        self.conv2 = nn.Conv2d(out_channels, out_channels, kernel_size=3, padding=1)
        self.bn2 = nn.BatchNorm2d(out_channels)

        # Projection shortcut.  BUGFIX: the original only checked
        # ``in_channels != out_channels``, so a block with stride != 1 but
        # equal channels would fail with a shape mismatch at ``o2 + x``.
        # The projection must kick in whenever EITHER the channel count OR
        # the spatial size changes (same condition for all existing callers,
        # since every stride=2 block here also changes channels).
        if stride != 1 or in_channels != out_channels:
            self.conv1x1 = nn.Conv2d(in_channels, out_channels, kernel_size=1, stride=stride)
            self.bn = nn.BatchNorm2d(out_channels)
        else:
            self.conv1x1 = None

    def forward(self, x):
        o1 = self.relu(self.bn1(self.conv1(x)))
        o2 = self.bn2(self.conv2(o1))
        if self.conv1x1 is not None:  # project x so shapes match before the add
            x = self.bn(self.conv1x1(x))
        out = self.relu(o2 + x)
        return out

# 自己定义一个resnet18网络
# A ResNet-18-style classifier assembled from `Residual` blocks.
class ResNet18(nn.Module):
    """ResNet-18-style classifier.

    Pipeline: stem (7x7/2 conv + BN + ReLU + 3x3/2 max-pool) -> four
    stages of two residual blocks each (64, 128, 256, 512 channels;
    stages 2-4 downsample by 2) -> global average pooling ->
    512->1000 fc -> small MLP head -> ``num_classes`` logits.

    Args:
        in_channels: channels of the input image (e.g. 3 for RGB).
        num_classes: size of the final logit vector.
    """

    def __init__(self, in_channels, num_classes):
        super(ResNet18, self).__init__()
        # Stem: two stride-2 ops give a 4x spatial downsampling.
        self.layer0 = nn.Sequential(
            nn.Conv2d(in_channels, 64, kernel_size=7, stride=2, padding=3, bias=False),
            nn.BatchNorm2d(64),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=3, stride=2, padding=1),
        )
        # Four residual stages; the first block of stages 2-4 downsamples.
        self.layer1 = self._stage(64, 64, first_stride=1)
        self.layer2 = self._stage(64, 128, first_stride=2)
        self.layer3 = self._stage(128, 256, first_stride=2)
        self.layer4 = self._stage(256, 512, first_stride=2)
        self.avgpool = nn.AdaptiveAvgPool2d(output_size=(1, 1))
        self.fc = nn.Linear(512, 1000)
        # Extra MLP head stacked on the ImageNet-sized fc layer.
        self.classifier = nn.Sequential(
            nn.Linear(1000, 64),
            nn.ReLU(True),
            nn.Dropout(p=0.5, inplace=False),
            nn.Linear(64, num_classes),
        )

    @staticmethod
    def _stage(in_ch, out_ch, first_stride):
        # One stage = two Residual blocks; only the first block may
        # change the channel count / spatial size.
        return nn.Sequential(
            Residual(in_ch, out_ch, stride=first_stride),
            Residual(out_ch, out_ch),
        )

    def forward(self, x):
        feats = self.layer0(x)
        for stage in (self.layer1, self.layer2, self.layer3, self.layer4):
            feats = stage(feats)
        pooled = self.avgpool(feats)
        flat = pooled.view((x.shape[0], -1))
        return self.classifier(self.fc(flat))
# Smoke test: instantiate the model for 3-channel input and 10 classes,
# then print the full module hierarchy for visual inspection.
resnet18 = ResNet18(3,10)
print(resnet18)

  • 0
    点赞
  • 3
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
以下是ResNet18网络结构的代码实现:

```python
import torch.nn as nn
import torch.nn.functional as F

class BasicBlock(nn.Module):
    expansion = 1

    def __init__(self, in_planes, planes, stride=1):
        super(BasicBlock, self).__init__()
        self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=1, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(planes)
        self.shortcut = nn.Sequential()
        if stride != 1 or in_planes != self.expansion*planes:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_planes, self.expansion*planes, kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(self.expansion*planes)
            )

    def forward(self, x):
        out = F.relu(self.bn1(self.conv1(x)))
        out = self.bn2(self.conv2(out))
        out += self.shortcut(x)
        out = F.relu(out)
        return out

class ResNet18(nn.Module):
    def __init__(self, num_classes=10):
        super(ResNet18, self).__init__()
        self.in_planes = 64
        self.conv1 = nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(64)
        self.layer1 = self._make_layer(64, 2, stride=1)
        self.layer2 = self._make_layer(128, 2, stride=2)
        self.layer3 = self._make_layer(256, 2, stride=2)
        self.layer4 = self._make_layer(512, 2, stride=2)
        self.linear = nn.Linear(512*BasicBlock.expansion, num_classes)

    def _make_layer(self, planes, num_blocks, stride):
        strides = [stride] + [1]*(num_blocks-1)
        layers = []
        for stride in strides:
            layers.append(BasicBlock(self.in_planes, planes, stride))
            self.in_planes = planes * BasicBlock.expansion
        return nn.Sequential(*layers)

    def forward(self, x):
        out = F.relu(self.bn1(self.conv1(x)))
        out = self.layer1(out)
        out = self.layer2(out)
        out = self.layer3(out)
        out = self.layer4(out)
        out = F.avg_pool2d(out, 4)
        out = out.view(out.size(0), -1)
        out = self.linear(out)
        return out
```

这里定义了两个类:BasicBlock和ResNet18。BasicBlock是ResNet18网络中的基本模块,包含两个卷积层和一个残差连接,用于构建ResNet18的各个层。
ResNet18是一个标准的ResNet18网络结构,包含一个卷积层,四个残差阶段,和一个全连接层。其中,每个残差阶段由多个BasicBlock组成,用于提高网络的深度和准确率。
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值