DenseNet Network

A PyTorch implementation of a DenseNet-121-style network (growth rate 32, block configuration (6, 12, 24, 16)), built from pre-activation conv blocks, dense blocks, transition layers, and a custom MLP classifier head.

import torch
from torch import nn

def conv_block(in_channel, out_channel):
    # Pre-activation unit (BN -> ReLU -> 3x3 conv) used inside each dense block
    layer = nn.Sequential(
        nn.BatchNorm2d(in_channel),
        nn.ReLU(),
        nn.Conv2d(in_channel, out_channel, kernel_size=3, padding=1, bias=False)
    )
    return layer
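
# Quick shape check (illustrative input sizes): the 3x3 conv with padding=1
# keeps H and W while remapping the channel count.
assert conv_block(64, 32)(torch.rand(1, 64, 28, 28)).shape == (1, 32, 28, 28)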

class dense_block(nn.Module):
    def __init__(self, in_channel, growth_rate, num_layers):
        super(dense_block, self).__init__()
        block = []
        channel = in_channel
        # Each conv_block sees all preceding feature maps, so its input width
        # grows by growth_rate with every layer
        for i in range(num_layers):
            block.append(conv_block(channel, growth_rate))
            channel += growth_rate
        self.net = nn.Sequential(*block)

    def forward(self, x):
        # Concatenate each layer's output with its input along the channel dim
        for layer in self.net:
            out = layer(x)
            x = torch.cat((out, x), dim=1)
        return x
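
# Quick shape check (illustrative input sizes): a dense block widens the
# channels to in_channel + num_layers * growth_rate = 64 + 6 * 32 = 256.
assert dense_block(64, 32, 6)(torch.rand(1, 64, 28, 28)).shape == (1, 256, 28, 28)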

def transition(in_channel, out_channel):
    # Transition layer between dense blocks: 1x1 conv to shrink the channel
    # count, then 2x2 average pooling to halve the spatial resolution
    trans_layer = nn.Sequential(
        nn.BatchNorm2d(in_channel),
        nn.ReLU(),
        nn.Conv2d(in_channel, out_channel, 1),
        nn.AvgPool2d(2, 2)
    )
    return trans_layer
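
# Quick shape check (illustrative input sizes): the transition layer halves
# the channel count (1x1 conv) and the spatial resolution (average pooling).
assert transition(256, 128)(torch.rand(1, 256, 28, 28)).shape == (1, 128, 14, 14)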

class densenet(nn.Module):
    def __init__(self, in_channel, num_classes, growth_rate=32, block_layers=(6, 12, 24, 16), init_weights=False):
        super(densenet, self).__init__()
        # Stem: 7x7 conv (stride 2) + 3x3 max pool (stride 2), as in DenseNet-121
        self.block1 = nn.Sequential(
            nn.Conv2d(in_channel, 64, kernel_size=7, stride=2, padding=3),
            nn.BatchNorm2d(64),
            nn.ReLU(True),
            nn.MaxPool2d(3, 2, padding=1)
        )
        # Channel bookkeeping below is hardcoded for growth_rate=32 and blocks
        # (6, 12, 24, 16): a dense block outputs in + num * growth_rate channels,
        # and each transition layer then halves the channel count.
        self.DB1 = self._make_dense_block(64, growth_rate, num=block_layers[0])   # 64 -> 256
        self.TL1 = self._make_transition_layer(256)                               # 256 -> 128
        self.DB2 = self._make_dense_block(128, growth_rate, num=block_layers[1])  # 128 -> 512
        self.TL2 = self._make_transition_layer(512)                               # 512 -> 256
        self.DB3 = self._make_dense_block(256, growth_rate, num=block_layers[2])  # 256 -> 1024
        self.TL3 = self._make_transition_layer(1024)                              # 1024 -> 512
        self.DB4 = self._make_dense_block(512, growth_rate, num=block_layers[3])  # 512 -> 1024
        # Final BN-ReLU, then global average pooling to a 1x1 feature map
        self.global_average = nn.Sequential(
            nn.BatchNorm2d(1024),
            nn.ReLU(),
            nn.AdaptiveAvgPool2d((1, 1)),
        )

        # The standard DenseNet head is a single linear layer:
        # self.classifier = nn.Linear(1024, num_classes)
        # This variant uses a deeper MLP head with dropout instead.
        self.classifier = nn.Sequential(
            nn.Linear(1024, 512),
            nn.ReLU(inplace=True),
            nn.Dropout(p=0.3),
            nn.Linear(512, 256),
            nn.ReLU(inplace=True),
            nn.Dropout(p=0.4),
            nn.Linear(256, num_classes),
        )

        if init_weights:
            self._initialize_weights()

    def forward(self, x):
        x = self.block1(x)
        x = self.DB1(x)
        x = self.TL1(x)
        x = self.DB2(x)
        x = self.TL2(x)
        x = self.DB3(x)
        x = self.TL3(x)
        x = self.DB4(x)
        x = self.global_average(x)
        x = x.view(x.shape[0], -1)  # flatten (N, 1024, 1, 1) -> (N, 1024)
        x = self.classifier(x)
        return x

    def _make_dense_block(self, channels, growth_rate, num):
        # A single dense block; its output width is channels + num * growth_rate
        return dense_block(channels, growth_rate, num)

    def _make_transition_layer(self, channels):
        # Halve the channel count between dense blocks
        return transition(channels, channels // 2)

    def _initialize_weights(self):
        # Kaiming init for conv layers, small Gaussian init for linear layers
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
                if m.bias is not None:
                    nn.init.constant_(m.bias, 0)
            elif isinstance(m, nn.Linear):
                nn.init.normal_(m.weight, 0, 0.01)
                nn.init.constant_(m.bias, 0)

# Smoke test: a 224x224 RGB input should yield logits of shape (1, num_classes)
net = densenet(in_channel=3, num_classes=10)
x = torch.rand(1, 3, 224, 224)
print(net(x).shape)  # torch.Size([1, 10])

# Per-layer shape inspection (uncomment to trace the feature-map sizes):
# for name, layer in net.named_children():
#     if name != 'classifier':
#         x = layer(x)
#         print(name, 'output shape:', x.shape)
#     else:
#         x = x.view(x.size(0), -1)
#         x = layer(x)
#         print(name, 'output shape:', x.shape)
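
# Optional sanity check: total number of trainable parameters
print('trainable params:', sum(p.numel() for p in net.parameters() if p.requires_grad))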

Below is a simple example implementation of a DenseNet network:

```python
import torch
import torch.nn as nn

class DenseBlock(nn.Module):
    def __init__(self, in_channels, growth_rate, num_layers):
        super(DenseBlock, self).__init__()
        self.layers = nn.ModuleList()
        for i in range(num_layers):
            self.layers.append(nn.Sequential(
                nn.BatchNorm2d(in_channels + i * growth_rate),
                nn.ReLU(inplace=True),
                nn.Conv2d(in_channels + i * growth_rate, growth_rate,
                          kernel_size=3, stride=1, padding=1, bias=False)
            ))

    def forward(self, x):
        features = [x]
        for layer in self.layers:
            new_features = layer(torch.cat(features, dim=1))
            features.append(new_features)
        return torch.cat(features, dim=1)

class TransitionLayer(nn.Module):
    def __init__(self, in_channels, out_channels):
        super(TransitionLayer, self).__init__()
        self.downsample = nn.Sequential(
            nn.BatchNorm2d(in_channels),
            nn.Conv2d(in_channels, out_channels, kernel_size=1, stride=1, bias=False),
            nn.AvgPool2d(kernel_size=2, stride=2)
        )

    def forward(self, x):
        return self.downsample(x)

class DenseNet(nn.Module):
    def __init__(self, growth_rate=32, block_config=(6, 12, 24, 16), num_classes=1000):
        super(DenseNet, self).__init__()
        self.features = nn.Sequential(
            nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3, bias=False),
            nn.BatchNorm2d(64),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        )
        num_features = 64
        for i, num_layers in enumerate(block_config):
            block = DenseBlock(num_features, growth_rate, num_layers)
            self.features.add_module('denseblock%d' % (i + 1), block)
            num_features = num_features + num_layers * growth_rate
            if i != len(block_config) - 1:
                trans = TransitionLayer(num_features, num_features // 2)
                self.features.add_module('transition%d' % (i + 1), trans)
                num_features = num_features // 2
        self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
        self.classifier = nn.Linear(num_features, num_classes)

    def forward(self, x):
        features = self.features(x)
        out = self.avgpool(features)
        out = out.view(features.size(0), -1)
        out = self.classifier(out)
        return out
```

This implementation defines three classes: `DenseBlock`, `TransitionLayer`, and `DenseNet`. `DenseBlock` implements the dense block in DenseNet, `TransitionLayer` implements the transition layer, and `DenseNet` defines the structure of the whole network.

In `DenseNet`, we first define a sequential module that implements the network's initial convolution and pooling layers. We then loop over the entries of `block_config` and define a dense block for each entry. If the entry is not the last one, we also add a transition layer that halves the number of feature-map channels for use in the next dense block. Finally, adaptive average pooling and a linear classifier perform global pooling and classification of the feature maps.

This implementation can be freely modified to suit different datasets and tasks.
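As a quick usage sketch (the batch size, input resolution, and class count below are illustrative assumptions):

```python
# Build the model defined above and run a random input through it
model = DenseNet(growth_rate=32, block_config=(6, 12, 24, 16), num_classes=1000)
out = model(torch.rand(1, 3, 224, 224))
print(out.shape)  # expected: torch.Size([1, 1000])
```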