ResNet PyTorch Implementation

ResNet addresses the vanishing-gradient problem of backpropagation by introducing skip (shortcut) connections across layers, as shown in the figure below:

[Figure: a plain network connection vs. a cross-layer residual (skip) connection]

The figure compares a plain network connection with a cross-layer residual connection. With plain connections, the gradient from upper layers must travel back one layer at a time; a residual connection adds a shorter path through the middle, so the gradient can flow back along this shortcut, which keeps it from becoming vanishingly small.
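
In equation form (a standard identity, not spelled out in the original post): a residual block computes y = x + F(x), so the Jacobian of y with respect to x is I + ∂F/∂x. By the chain rule, ∂L/∂x = ∂L/∂y · (I + ∂F/∂x), and the identity term I guarantees a direct gradient path back to earlier layers even when ∂F/∂x is tiny.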

A residual network is just a stack of residual blocks like the one above. A residual block can be implemented as follows (imports and the conv3x3 helper, which the original snippet omitted, are included so the code runs as-is):

import torch
from torch import nn
import torch.nn.functional as F


def conv3x3(in_channel, out_channel, stride=1):
    # 3x3 convolution with padding=1, so the spatial size only changes via stride
    return nn.Conv2d(in_channel, out_channel, 3, stride=stride, padding=1, bias=False)


class residual_block(nn.Module):
    def __init__(self, in_channel, out_channel, same_shape=True):
        super(residual_block, self).__init__()
        self.same_shape = same_shape
        # when the output shape differs from the input, downsample with stride 2
        stride = 1 if self.same_shape else 2

        self.conv1 = conv3x3(in_channel, out_channel, stride=stride)
        self.bn1 = nn.BatchNorm2d(out_channel)

        self.conv2 = conv3x3(out_channel, out_channel)
        self.bn2 = nn.BatchNorm2d(out_channel)
        if not self.same_shape:
            # 1x1 convolution so the shortcut matches the new channels and spatial size
            self.conv3 = nn.Conv2d(in_channel, out_channel, 1, stride=stride)

    def forward(self, x):
        out = self.conv1(x)
        out = F.relu(self.bn1(out), True)
        out = self.conv2(out)
        # note: this applies ReLU before the addition as well; the original
        # ResNet paper activates only after the shortcut is added
        out = F.relu(self.bn2(out), True)

        if not self.same_shape:
            x = self.conv3(x)
        # add the shortcut, then apply the final activation
        return F.relu(x + out, True)
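
A quick shape check (my own example, not from the original post): with same_shape=False the block halves the spatial resolution while changing the channel count.

block = residual_block(32, 64, same_shape=False)  # hypothetical sizes for illustration
x = torch.randn(1, 32, 96, 96)
print(block(x).shape)  # torch.Size([1, 64, 48, 48])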

The full ResNet is a stack of these residual_block modules; the code is as follows:

class resnet(nn.Module):
    def __init__(self, in_channel, num_classes, verbose=False):
        super(resnet, self).__init__()
        self.verbose = verbose

        # initial 7x7 convolution with stride 2 (no padding)
        self.block1 = nn.Conv2d(in_channel, 64, 7, 2)

        self.block2 = nn.Sequential(
            nn.MaxPool2d(3, 2),
            residual_block(64, 64),
            residual_block(64, 64)
        )

        self.block3 = nn.Sequential(
            # same_shape=False: halve the spatial size and double the channels
            residual_block(64, 128, False),
            residual_block(128, 128)
        )

        self.block4 = nn.Sequential(
            residual_block(128, 256, False),
            residual_block(256, 256)
        )

        self.block5 = nn.Sequential(
            residual_block(256, 512, False),
            residual_block(512, 512),
            nn.AvgPool2d(3)
        )

        self.classifier = nn.Linear(512, num_classes)

    def forward(self, x):
        x = self.block1(x)
        if self.verbose:
            print('block 1 output: {}'.format(x.shape))
        x = self.block2(x)
        if self.verbose:
            print('block 2 output: {}'.format(x.shape))
        x = self.block3(x)
        if self.verbose:
            print('block 3 output: {}'.format(x.shape))
        x = self.block4(x)
        if self.verbose:
            print('block 4 output: {}'.format(x.shape))
        x = self.block5(x)
        if self.verbose:
            print('block 5 output: {}'.format(x.shape))
        # flatten and classify
        x = x.view(x.shape[0], -1)
        x = self.classifier(x)
        return x
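
A minimal sanity check (my own addition, not from the original post), assuming a 3-channel 96x96 input, for which the hard-coded AvgPool2d(3) and Linear(512, num_classes) dimensions work out:

test_net = resnet(3, 10, verbose=True)
test_x = torch.randn(1, 3, 96, 96)
test_y = test_net(test_x)
print('output: {}'.format(test_y.shape))

With this input, the blocks should report (1, 64, 45, 45), (1, 64, 22, 22), (1, 128, 11, 11), (1, 256, 6, 6), and (1, 512, 1, 1) in turn, and the final output is (1, 10).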
ResNet (Residual Network) is a very popular deep convolutional neural network architecture, and it is highly effective at mitigating the vanishing- and exploding-gradient problems that arise when training deep networks. Below is an alternative way to implement ResNet in PyTorch:

1. Import the required libraries and modules:

```python
import torch
import torch.nn as nn
import torch.nn.functional as F
```

2. Define the basic residual block (Residual Block):

```python
class ResidualBlock(nn.Module):
    def __init__(self, in_channels, out_channels, stride=1):
        super(ResidualBlock, self).__init__()
        self.conv1 = nn.Conv2d(in_channels, out_channels, kernel_size=3,
                               stride=stride, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(out_channels)
        self.conv2 = nn.Conv2d(out_channels, out_channels, kernel_size=3,
                               stride=1, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(out_channels)
        self.shortcut = nn.Sequential()
        if stride != 1 or in_channels != out_channels:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_channels, out_channels, kernel_size=1,
                          stride=stride, bias=False),
                nn.BatchNorm2d(out_channels)
            )

    def forward(self, x):
        residual = x
        out = F.relu(self.bn1(self.conv1(x)))
        out = self.bn2(self.conv2(out))
        out += self.shortcut(residual)
        out = F.relu(out)
        return out
```

3. Define the ResNet model:

```python
class ResNet(nn.Module):
    def __init__(self, block, num_blocks, num_classes=10):
        super(ResNet, self).__init__()
        self.in_channels = 64
        self.conv1 = nn.Conv2d(3, 64, kernel_size=3, stride=1,
                               padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(64)
        self.layer1 = self.make_layer(block, 64, num_blocks[0], stride=1)
        self.layer2 = self.make_layer(block, 128, num_blocks[1], stride=2)
        self.layer3 = self.make_layer(block, 256, num_blocks[2], stride=2)
        self.layer4 = self.make_layer(block, 512, num_blocks[3], stride=2)
        self.avg_pool = nn.AdaptiveAvgPool2d((1, 1))
        self.fc = nn.Linear(512, num_classes)

    def make_layer(self, block, out_channels, num_blocks, stride):
        layers = []
        layers.append(block(self.in_channels, out_channels, stride))
        self.in_channels = out_channels
        for _ in range(1, num_blocks):
            layers.append(block(out_channels, out_channels))
        return nn.Sequential(*layers)

    def forward(self, x):
        out = F.relu(self.bn1(self.conv1(x)))
        out = self.layer1(out)
        out = self.layer2(out)
        out = self.layer3(out)
        out = self.layer4(out)
        out = self.avg_pool(out)
        out = out.view(out.size(0), -1)
        out = self.fc(out)
        return out
```

4. Create a ResNet model instance:

```python
def ResNet18():
    return ResNet(ResidualBlock, [2, 2, 2, 2])
```

This completes the ResNet implementation in PyTorch. You can modify and extend it to suit your own needs.
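A short usage sketch (my own addition): the 3x3, stride-1 stem above is the CIFAR-style variant, so a 32x32 RGB input is a natural fit:

```python
model = ResNet18()
x = torch.randn(4, 3, 32, 32)  # a batch of four CIFAR-sized images
logits = model(x)
print(logits.shape)  # torch.Size([4, 10])
```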