PyTorch Learning (14): Layer Containers

This post walks through how to use the three container modules nn.Sequential, nn.ModuleList, and nn.ModuleDict, and also shows a way of wrapping repeated sub-modules in a class. By the end you will know how to use all three containers and how to build a network out of class-based building blocks.

1.nn.Sequential

There are four ways to build an nn.Sequential. The first lists every layer directly in the constructor; the second stacks layers one by one with add_module; the third organizes the layers with an OrderedDict so that each gets a name; the fourth first collects the layers in a Python list and then hands them to nn.Sequential, where the * operator unpacks the list into individual layer arguments.

import torch
import torch.nn as nn
from collections import OrderedDict

# Sequential 1: pass the layers directly to the constructor
net1 = nn.Sequential(
    nn.Conv2d(3,6,kernel_size=5,padding=2),
    nn.Conv2d(6, 12, kernel_size=3, padding=1),
    nn.BatchNorm2d(12),
    nn.ReLU()
)

print('***************Sequential 1********************')
print(net1)



# Sequential 2: create an empty container, then add named layers with add_module
net2 = nn.Sequential()
net2.add_module("conv1", nn.Conv2d(3,6,kernel_size=5,padding=2))
net2.add_module("conv2", nn.Conv2d(6, 12, kernel_size=3, padding=1))
net2.add_module("bn1", nn.BatchNorm2d(12))
net2.add_module("relu", nn.ReLU())

print('***************Sequential 2********************')
print(net2)


# Sequential 3: name the layers with an OrderedDict
net3 = nn.Sequential(OrderedDict([
    ("conv1",nn.Conv2d(3,32,3,1,1)),
    ("conv2", nn.Conv2d(32, 32, 3, 1, 1)),
    ("Relu1", nn.ReLU()),
    ("pool", nn.MaxPool2d(2))
]))

print('****************Sequential 3*******************')
print(net3)

# Sequential 4: collect the layers in a list, then unpack it with *
layers = []
layers.append(nn.Conv2d(3,32,3,1,1))
layers.append(nn.Conv2d(32, 32, 3, 1, 1))
layers.append(nn.ReLU())
layers.append(nn.MaxPool2d(2))
net4 = nn.Sequential(*layers)
print('*****************Sequential 4******************')
print(net4)
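
All four nets run forward the same way: nn.Sequential applies its layers in registration order, and layers can be fetched by index or, when named, by attribute. A minimal sanity check (the 32x32 input shape is an arbitrary choice):

x = torch.randn(1, 3, 32, 32)   # dummy batch: N=1, C=3, H=W=32
out = net1(x)                   # conv(3->6) -> conv(6->12) -> BN -> ReLU
print(out.shape)                # torch.Size([1, 12, 32, 32]); both paddings preserve H and W
print(net1[0])                  # first layer, fetched by index
print(net3.conv1)               # named layer, fetched as an attribute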

2.nn.ModuleList

Again there are two approaches: the first passes the modules directly (and then grows the container with extend); the second builds the modules in a plain Python list via a helper function and wraps the list at the end.


# ModuleList: extend makes the container flexible to grow, though the overall structure is less obvious at a glance
model = nn.ModuleList([
    nn.Conv2d(3, 6, kernel_size=5, padding=2),
    nn.Conv2d(6, 12, kernel_size=3, padding=1),
    nn.BatchNorm2d(12),
    nn.ReLU()
])
model.extend([nn.Linear(12,12) for i in range(2)])
model.extend([nn.Linear(12,10)])
print('*****************ModuleList 1******************')
print(model)
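
An nn.ModuleList supports len() and indexing just like a Python list, but it also registers every entry as a submodule, so their parameters show up in model.parameters(). A quick check:

print(len(model))   # 7 modules after the two extend calls
print(model[0])     # the first Conv2d layer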

make_layers = []
def make_module(idx):
    # idx is unused here; it is kept as a hook for per-stage configuration
    layer = []
    layer.append(nn.Conv2d(3,16,3,1,1))
    layer.append(nn.BatchNorm2d(16))
    return nn.Sequential(*layer)

for i in range(3):
    make_layers.append(make_module(i))

net_moduleList = nn.ModuleList(make_layers)
print('*****************ModuleList 2******************')
print(net_moduleList)
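
Unlike nn.Sequential, an nn.ModuleList defines no forward of its own, so it cannot be called on a tensor directly; a wrapping module has to iterate over it. A minimal sketch (the MyList name and the 3->3 channel choice are illustrative):

class MyList(nn.Module):
    def __init__(self):
        super(MyList, self).__init__()
        # three conv blocks that all map 3 -> 3 channels so they chain cleanly
        self.blocks = nn.ModuleList([nn.Conv2d(3, 3, 3, 1, 1) for _ in range(3)])
    def forward(self, x):
        for block in self.blocks:   # apply each registered block in turn
            x = block(x)
        return x

print(MyList()(torch.randn(1, 3, 8, 8)).shape)   # torch.Size([1, 3, 8, 8])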

3.nn.ModuleDict

nn.ModuleDict stores submodules under string keys; in forward, the layer and the activation function to apply are selected by the key arguments passed in.


#ModuleDict
class MyModuleDict(nn.Module):
    def __init__(self):
        super(MyModuleDict,self).__init__()
        self.choices = nn.ModuleDict({
            "conv1":nn.Conv2d(10,12,3,1,1),
            "pool":nn.MaxPool2d(2)
        })
        self.activates = nn.ModuleDict({
            "relu":nn.ReLU(),
            "leakyRelu":nn.LeakyReLU(),
            "sigmoid":nn.Sigmoid()
        })
    def forward(self,x,choice,acti):
        x = self.choices[choice](x)
        x = self.activates[acti](x)
        return x
net_moduleDict = MyModuleDict()
print('*****************ModuleDict 1******************')
print(net_moduleDict)
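
Calling the model then means passing the keys along with the input. A quick check (the 8x8 input is an arbitrary choice matching conv1's 10 input channels):

x = torch.randn(1, 10, 8, 8)
out = net_moduleDict(x, "conv1", "relu")     # Conv2d: 10 -> 12 channels, then ReLU
print(out.shape)                             # torch.Size([1, 12, 8, 8])
out = net_moduleDict(x, "pool", "sigmoid")   # MaxPool2d halves H and W, then Sigmoid
print(out.shape)                             # torch.Size([1, 10, 4, 4])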

4.Wrapping layers in a class

When part of a model repeats the same layer pattern (same code, different parameters), that pattern can be wrapped in a class; each instance is then built just by passing in its parameters. The example below uses such a block to sketch part of a UNet++.


class ContinuousConv(nn.Module):
    def __init__(self, in_channels, out_channels, pre_batch_norm=True):
        super(ContinuousConv, self).__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels

        if pre_batch_norm:
            # pre-activation variant: BN and ReLU come before the first conv
            self.conv_forward = nn.Sequential(
                nn.BatchNorm2d(self.in_channels),
                nn.ReLU(),
                nn.Conv2d(self.in_channels, self.out_channels, 3, padding=1),
                nn.BatchNorm2d(self.out_channels),
                nn.ReLU(),
                nn.Conv2d(self.out_channels, self.out_channels, 3, padding=1)
            )
        else:
            self.conv_forward = nn.Sequential(
                nn.Conv2d(self.in_channels, self.out_channels, 3, padding=1),
                nn.BatchNorm2d(self.out_channels),
                nn.ReLU(),
                nn.Conv2d(self.out_channels, self.out_channels, 3, padding=1)
            )

    def forward(self, x):
        x = self.conv_forward(x)
        return x

class UNetPlusPlus(nn.Module):
    def __init__(self, num_class, deep_supervision=False):
        super(UNetPlusPlus, self).__init__()
        self.num_classes = num_class
        self.deep_supervision = deep_supervision
        self.filters = [64, 128, 256, 512, 1024]

        # in UNet++ each nested block receives concatenated feature maps,
        # hence the multiplied input channel counts
        self.conv3_1 = ContinuousConv(512*2, 512, pre_batch_norm=True)
        self.conv2_2 = ContinuousConv(256*3, 256, pre_batch_norm=True)  # defined but unused in this truncated forward
        self.conv2_1 = ContinuousConv(256*2, 256, pre_batch_norm=True)
    def forward(self, x):
        # chain the blocks so the channel counts line up: 1024 -> 512 -> 256
        x = self.conv3_1(x)
        x = self.conv2_1(x)
        return x

deep_supervision = True
model_class = UNetPlusPlus(num_class=3, deep_supervision=deep_supervision)

print('*****************class wrap******************')
print(model_class)
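
A dummy forward pass confirms the channel flow through the two chained blocks (the 16x16 spatial size is an arbitrary choice):

x = torch.randn(1, 1024, 16, 16)   # conv3_1 expects 512*2 = 1024 input channels
out = model_class(x)               # 1024 -> 512 -> 256 channels
print(out.shape)                   # torch.Size([1, 256, 16, 16])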

5.Full code

The complete script is below; reading the printed output shows the structure of each network defined above.

import torch
import torch.nn as nn
from collections import OrderedDict

# Sequential 1: pass the layers directly to the constructor
net1 = nn.Sequential(
    nn.Conv2d(3,6,kernel_size=5,padding=2),
    nn.Conv2d(6, 12, kernel_size=3, padding=1),
    nn.BatchNorm2d(12),
    nn.ReLU()
)

print('***************Sequential 1********************')
print(net1)



# Sequential 2: create an empty container, then add named layers with add_module
net2 = nn.Sequential()
net2.add_module("conv1", nn.Conv2d(3,6,kernel_size=5,padding=2))
net2.add_module("conv2", nn.Conv2d(6, 12, kernel_size=3, padding=1))
net2.add_module("bn1", nn.BatchNorm2d(12))
net2.add_module("relu", nn.ReLU())

print('***************Sequential 2********************')
print(net2)


# Sequential 3: name the layers with an OrderedDict
net3 = nn.Sequential(OrderedDict([
    ("conv1",nn.Conv2d(3,32,3,1,1)),
    ("conv2", nn.Conv2d(32, 32, 3, 1, 1)),
    ("Relu1", nn.ReLU()),
    ("pool", nn.MaxPool2d(2))
]))

print('****************Sequential 3*******************')
print(net3)

# Sequential 4: collect the layers in a list, then unpack it with *
layers = []
layers.append(nn.Conv2d(3,32,3,1,1))
layers.append(nn.Conv2d(32, 32, 3, 1, 1))
layers.append(nn.ReLU())
layers.append(nn.MaxPool2d(2))
net4 = nn.Sequential(*layers)
print('*****************Sequential 4******************')
print(net4)


# ModuleList: extend makes the container flexible to grow, though the overall structure is less obvious at a glance
model = nn.ModuleList([
    nn.Conv2d(3, 6, kernel_size=5, padding=2),
    nn.Conv2d(6, 12, kernel_size=3, padding=1),
    nn.BatchNorm2d(12),
    nn.ReLU()
])
model.extend([nn.Linear(12,12) for i in range(2)])
model.extend([nn.Linear(12,10)])
print('*****************ModuleList 1******************')
print(model)

make_layers = []
def make_module(idx):
    # idx is unused here; it is kept as a hook for per-stage configuration
    layer = []
    layer.append(nn.Conv2d(3,16,3,1,1))
    layer.append(nn.BatchNorm2d(16))
    return nn.Sequential(*layer)

for i in range(3):
    make_layers.append(make_module(i))

net_moduleList = nn.ModuleList(make_layers)
print('*****************ModuleList 2******************')
print(net_moduleList)

#ModuleDict
class MyModuleDict(nn.Module):
    def __init__(self):
        super(MyModuleDict,self).__init__()
        self.choices = nn.ModuleDict({
            "conv1":nn.Conv2d(10,12,3,1,1),
            "pool":nn.MaxPool2d(2)
        })
        self.activates = nn.ModuleDict({
            "relu":nn.ReLU(),
            "leakyRelu":nn.LeakyReLU(),
            "sigmoid":nn.Sigmoid()
        })
    def forward(self,x,choice,acti):
        x = self.choices[choice](x)
        x = self.activates[acti](x)
        return x
net_moduleDict = MyModuleDict()
print('*****************ModuleDict 1******************')
print(net_moduleDict)


class ContinuousConv(nn.Module):
    def __init__(self, in_channels, out_channels, pre_batch_norm=True):
        super(ContinuousConv, self).__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels

        if pre_batch_norm:
            # pre-activation variant: BN and ReLU come before the first conv
            self.conv_forward = nn.Sequential(
                nn.BatchNorm2d(self.in_channels),
                nn.ReLU(),
                nn.Conv2d(self.in_channels, self.out_channels, 3, padding=1),
                nn.BatchNorm2d(self.out_channels),
                nn.ReLU(),
                nn.Conv2d(self.out_channels, self.out_channels, 3, padding=1)
            )
        else:
            self.conv_forward = nn.Sequential(
                nn.Conv2d(self.in_channels, self.out_channels, 3, padding=1),
                nn.BatchNorm2d(self.out_channels),
                nn.ReLU(),
                nn.Conv2d(self.out_channels, self.out_channels, 3, padding=1)
            )

    def forward(self, x):
        x = self.conv_forward(x)
        return x

class UNetPlusPlus(nn.Module):
    def __init__(self, num_class, deep_supervision=False):
        super(UNetPlusPlus, self).__init__()
        self.num_classes = num_class
        self.deep_supervision = deep_supervision
        self.filters = [64, 128, 256, 512, 1024]

        # in UNet++ each nested block receives concatenated feature maps,
        # hence the multiplied input channel counts
        self.conv3_1 = ContinuousConv(512*2, 512, pre_batch_norm=True)
        self.conv2_2 = ContinuousConv(256*3, 256, pre_batch_norm=True)  # defined but unused in this truncated forward
        self.conv2_1 = ContinuousConv(256*2, 256, pre_batch_norm=True)
    def forward(self, x):
        # chain the blocks so the channel counts line up: 1024 -> 512 -> 256
        x = self.conv3_1(x)
        x = self.conv2_1(x)
        return x

deep_supervision = True
model_class = UNetPlusPlus(num_class=3, deep_supervision=deep_supervision)

print('*****************class wrap******************')
print(model_class)
