PyTorch implementation of DenseNet (PyTorch实现DenseNet)

nn.MaxPool2d(kernel_size=3, stride=2, padding=1)

)

class _TransitionLayer(nn.Module):

def init(self, inplace, plance):

super(_TransitionLayer, self).init()

self.transition_layer = nn.Sequential(

nn.BatchNorm2d(inplace),

nn.ReLU(inplace=True),

nn.Conv2d(in_channels=inplace,out_channels=plance,kernel_size=1,stride=1,padding=0,bias=False),

nn.AvgPool2d(kernel_size=2,stride=2),

)

def forward(self, x):

return self.transition_layer(x)

class _DenseLayer(nn.Module):

def init(self, inplace, growth_rate, bn_size, drop_rate=0):

super(_DenseLayer, self).init()

self.drop_rate = drop_rate

self.dense_layer = nn.Sequential(

nn.BatchNorm2d(inplace),

nn.ReLU(inplace=True),

nn.Conv2d(in_channels=inplace, out_channels=bn_size * growth_rate, kernel_size=1, stride=1, padding=0, bias=False),

nn.BatchNorm2d(bn_size * growth_rate),

nn.ReLU(inplace=True),

nn.Conv2d(in_channels=bn_size * growth_rate, out_channels=growth_rate, kernel_size=3, stride=1, padding=1, bias=False),

)

self.dropout = nn.Dropout(p=self.drop_rate)

def forward(self, x):

y = self.dense_layer(x)

if self.drop_rate > 0:

y = self.dropout(y)

return torch.cat([x, y], 1)

class DenseBlock(nn.Module):
    """Stack of ``num_layers`` dense layers with dense connectivity.

    Each `_DenseLayer` i receives ``inplances + i * growth_rate`` input
    channels (the original input plus all previously produced features),
    so the block's output has ``inplances + num_layers * growth_rate``
    channels.

    Args:
        num_layers: how many `_DenseLayer`s to stack.
        inplances: channels entering the block.
        growth_rate: channels each layer adds.
        bn_size: bottleneck multiplier forwarded to each layer.
        drop_rate: dropout probability forwarded to each layer.
    """

    # BUG FIX: renamed `init` -> `__init__` and `super().init()` ->
    # `super().__init__()`; the original constructor name was never called.
    def __init__(self, num_layers, inplances, growth_rate, bn_size, drop_rate=0):
        super(DenseBlock, self).__init__()
        layers = []
        for i in range(num_layers):
            # Layer i sees the block input plus i earlier layers' output.
            layers.append(_DenseLayer(inplances + i * growth_rate,
                                      growth_rate, bn_size, drop_rate))
        self.layers = nn.Sequential(*layers)

    def forward(self, x):
        """Feed ``x`` through every dense layer in sequence."""
        return self.layers(x)

class DenseNet(nn.Module):
    """DenseNet backbone: stem conv, four dense blocks joined by
    transition layers, global average pooling, and a linear classifier.

    Channel bookkeeping follows the paper: each dense block adds
    ``num_layers * growth_rate`` channels, and each transition layer
    halves the channel count (compression theta = 0.5).

    Args:
        init_channels: channels produced by the stem (`Conv1`).
        growth_rate: per-layer growth rate k.
        blocks: number of dense layers in each of the four blocks.
        num_classes: size of the final classification layer.

    NOTE(review): `Conv1` is defined elsewhere in this file; presumably it
    is the 7x7 stride-2 conv + maxpool stem. The fixed 7x7 average pool
    assumes 224x224 inputs — confirm before using other resolutions.
    """

    # BUG FIX: renamed `init` -> `__init__` and `super().init()` ->
    # `super().__init__()`; with the original spelling, constructing
    # DenseNet with keyword arguments raised a TypeError.
    def __init__(self, init_channels=64, growth_rate=32,
                 blocks=[6, 12, 24, 16], num_classes=1000):
        super(DenseNet, self).__init__()
        bn_size = 4
        drop_rate = 0
        self.conv1 = Conv1(in_planes=3, places=init_channels)

        # Track the running channel count through blocks and transitions.
        num_features = init_channels
        self.layer1 = DenseBlock(num_layers=blocks[0], inplances=num_features,
                                 growth_rate=growth_rate, bn_size=bn_size,
                                 drop_rate=drop_rate)
        num_features = num_features + blocks[0] * growth_rate
        self.transition1 = _TransitionLayer(inplace=num_features,
                                            plance=num_features // 2)
        num_features = num_features // 2

        self.layer2 = DenseBlock(num_layers=blocks[1], inplances=num_features,
                                 growth_rate=growth_rate, bn_size=bn_size,
                                 drop_rate=drop_rate)
        num_features = num_features + blocks[1] * growth_rate
        self.transition2 = _TransitionLayer(inplace=num_features,
                                            plance=num_features // 2)
        num_features = num_features // 2

        self.layer3 = DenseBlock(num_layers=blocks[2], inplances=num_features,
                                 growth_rate=growth_rate, bn_size=bn_size,
                                 drop_rate=drop_rate)
        num_features = num_features + blocks[2] * growth_rate
        self.transition3 = _TransitionLayer(inplace=num_features,
                                            plance=num_features // 2)
        num_features = num_features // 2

        # Final block has no transition after it.
        self.layer4 = DenseBlock(num_layers=blocks[3], inplances=num_features,
                                 growth_rate=growth_rate, bn_size=bn_size,
                                 drop_rate=drop_rate)
        num_features = num_features + blocks[3] * growth_rate

        self.avgpool = nn.AvgPool2d(7, stride=1)
        self.fc = nn.Linear(num_features, num_classes)

    def forward(self, x):
        """Return class logits of shape (batch, num_classes)."""
        x = self.conv1(x)
        x = self.layer1(x)
        x = self.transition1(x)
        x = self.layer2(x)
        x = self.transition2(x)
        x = self.layer3(x)
        x = self.transition3(x)
        x = self.layer4(x)
        x = self.avgpool(x)
        x = x.view(x.size(0), -1)  # flatten pooled features
        x = self.fc(x)
        return x

def DenseNet121():
    """Build a DenseNet-121 (blocks 6/12/24/16, growth rate 32)."""
    return DenseNet(init_channels=64, growth_rate=32, blocks=[6, 12, 24, 16])

def DenseNet169():
    """Build a DenseNet-169 (blocks 6/12/32/32, growth rate 32)."""
    return DenseNet(init_channels=64, growth_rate=32, blocks=[6, 12, 32, 32])

def DenseNet201():
    """Build a DenseNet-201 (blocks 6/12/48/32, growth rate 32)."""
    return DenseNet(init_channels=64, growth_rate=32, blocks=[6, 12, 48, 32])

评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值