# Based on others' blog posts plus my own experiments; summarized here for reference.
import torch
import torch.nn as nn
"""
比较了nn.Sequential,ModuleList与python list
构建网络的区别
"""
class net1(nn.Module):
    """Demo: layers stored in a plain Python list are NOT registered as submodules."""

    def __init__(self):
        # Bug fix: the original called super(net2, ...) here; it must name this class.
        super(net1, self).__init__()
        # Bug fix: the original passed 0 positionally as the 4th argument (stride),
        # which is invalid; padding=0 (the default) was presumably intended — TODO confirm.
        self.conv = nn.Sequential(nn.Conv2d(3, 5, 5, padding=0))
        # Note: layers kept in a plain Python list do NOT appear in the network's
        # registered modules/parameters (use nn.ModuleList for that).
        self.linears = [nn.Linear(10, 10) for i in range(2)]

    def forward(self, x):
        x = self.conv(x)
        for m in self.linears:
            x = m(x)
        return x
class net2(nn.Module):
    """Demo: layers assigned as individual attributes ARE registered automatically."""

    def __init__(self):
        super(net2, self).__init__()
        # Bug fix: the original passed 0 positionally as the 4th argument (stride),
        # which is invalid; padding=0 (the default) was presumably intended — TODO confirm.
        self.conv = nn.Sequential(nn.Conv2d(3, 5, 5, padding=0))
        # Layers assigned directly as attributes are auto-registered in the module.
        self.linear1 = nn.Linear(10, 10)
        self.linear2 = nn.Linear(10, 10)

    def forward(self, x):
        x = self.conv(x)
        x = self.linear1(x)
        x = self.linear2(x)
        return x
class net3(nn.Module):
    """Demo: nn.ModuleList registers its layers, but imposes no call order."""

    def __init__(self):
        # Bug fix: the original called super(net1, ...) here; it must name this class.
        super(net3, self).__init__()
        # Layers wrapped in nn.ModuleList are automatically registered in the network.
        self.linears = nn.ModuleList([nn.Linear(10, 10) for i in range(2)])

    def forward(self, x):
        # 1. nn.ModuleList does not define a network by itself — it merely stores
        #    modules together with no implied ordering; they may be called in any
        #    order, e.g.:
        #        x = self.linears[1](x)
        #        x = self.linears[0](x)
        #    (the original comment indexed self.linears[2], which is out of range
        #    for this 2-element list)
        # 2. A module (layer) may be called multiple times; repeated calls use the
        #    same set of parameters, i.e. the parameters are shared.
        for m in self.linears:
            x = m(x)
        return x
class net4(nn.Module):
    """Demo: nn.Sequential keeps its modules in order and provides its own forward.

    nn.Sequential differs from nn.ModuleList in two main ways:
    1. the modules inside nn.Sequential are arranged in a fixed order, and
    2. nn.Sequential already implements an internal forward(), so the whole
       block can be invoked directly as a single callable.
    """

    def __init__(self):
        super(net4, self).__init__()
        stages = [
            nn.Conv2d(1, 20, 5),
            nn.ReLU(),
            nn.Conv2d(20, 64, 5),
            nn.ReLU(),
        ]
        self.block = nn.Sequential(*stages)

    def forward(self, x):
        return self.block(x)
class net5(nn.Module):
    """Demo: composing previously defined modules as submodules.

    Submodules assigned as attributes are registered automatically, so the
    printed module tree shows both nested networks.
    """

    def __init__(self):
        super(net5, self).__init__()
        self.net1 = net2()
        self.net2 = net4()
if __name__ == '__main__':
    # Print the composed module tree to show which layers were registered.
    net = net5()
    print(net)