1 model._modules.items()
model._modules.items()是一个包含模型所有子模块的迭代器。在PyTorch中,当我们定义一个nn.Module子类时,我们可以使用nn.Sequential或nn.ModuleDict等容器类将多个子模块组合成一个整体。在这种情况下,我们可以通过访问nn.Module类中的_modules属性来访问这些子模块。_modules是一个有序字典,其中键是子模块的名称,值是子模块对象
import torch.nn as nn
class MyModel(nn.Module):
    """Toy CNN used to demonstrate iterating over a module's direct children.

    Each attribute assignment below registers the submodule in the module's
    internal ``_modules`` ordered dict under its attribute name.
    """

    def __init__(self):
        super().__init__()
        # Two 3x3 "same"-padding convolutions and one shared ReLU.
        self.conv1 = nn.Conv2d(3, 16, kernel_size=3, stride=1, padding=1)
        self.conv2 = nn.Conv2d(16, 32, kernel_size=3, stride=1, padding=1)
        self.relu = nn.ReLU()

    def forward(self, x):
        # conv -> relu -> conv -> relu, reusing the single ReLU instance.
        return self.relu(self.conv2(self.relu(self.conv1(x))))
model = MyModel()
# _modules is the OrderedDict that backs a Module's direct children:
# keys are attribute names, values are the submodule objects.
for child_name, child in model._modules.items():
    print(child_name, child)
输出如下:
conv1 Conv2d(3, 16, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
conv2 Conv2d(16, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
relu ReLU()
在上面的例子中,我们可以通过迭代model._modules.items()来访问模型中的每个子模块。name是子模块的名称(例如"conv1"、"conv2"和"relu"),module是子模块的实例对象(例如nn.Conv2d和nn.ReLU)。在嵌套模型中,每个子模块本身也可以包含子模块,因此我们可以借助_modules属性递归遍历整个模型的所有子模块。
2 named_modules()
import torch.nn as nn
class MyModel(nn.Module):
    """Small two-conv CNN for the named_modules() walkthrough."""

    def __init__(self):
        super(MyModel, self).__init__()
        self.conv1 = nn.Conv2d(3, 16, kernel_size=3, stride=1, padding=1)
        self.conv2 = nn.Conv2d(16, 32, kernel_size=3, stride=1, padding=1)
        self.relu = nn.ReLU()

    def forward(self, x):
        # Keep an explicit intermediate so each stage is easy to step through.
        out = self.conv1(x)
        out = self.relu(out)
        out = self.conv2(out)
        return self.relu(out)
model = MyModel()
# named_modules() yields (qualified_name, module) pairs for the whole tree;
# the first pair is the root module itself under the empty name "".
for qualified_name, submodule in model.named_modules():
    print(qualified_name, submodule)
MyModel(
(conv1): Conv2d(3, 16, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
(conv2): Conv2d(16, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
(relu): ReLU()
)
conv1 Conv2d(3, 16, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
conv2 Conv2d(16, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
relu ReLU()
3 查看每一层的参数名和参数
import torch.nn as nn
class MyModel(nn.Module):
    """Two-conv CNN used to inspect parameter names and shapes."""

    def __init__(self):
        super().__init__()
        self.conv1 = nn.Conv2d(3, 16, kernel_size=3, stride=1, padding=1)
        self.conv2 = nn.Conv2d(16, 32, kernel_size=3, stride=1, padding=1)
        self.relu = nn.ReLU()

    def forward(self, x):
        # Same pipeline as before, expressed as a pass over the stage list.
        for stage in (self.conv1, self.relu, self.conv2, self.relu):
            x = stage(x)
        return x
model = MyModel()
# named_parameters() yields (dotted_name, Parameter) for every learnable
# tensor; parameter-free modules such as ReLU contribute nothing.
for param_name, param in model.named_parameters():
    print(param_name, param.shape)
conv1.weight torch.Size([16, 3, 3, 3])
conv1.bias torch.Size([16])
conv2.weight torch.Size([32, 16, 3, 3])
conv2.bias torch.Size([32])
4 named_children() 和 named_modules()
import torch.nn as nn
class MyModel(nn.Module):
    """CNN plus an nn.Sequential tail, for contrasting named_children()
    (direct children only) with named_modules() (full recursive walk)."""

    def __init__(self):
        super().__init__()
        self.conv1 = nn.Conv2d(3, 16, kernel_size=3, stride=1, padding=1)
        self.conv2 = nn.Conv2d(16, 32, kernel_size=3, stride=1, padding=1)
        self.relu = nn.ReLU()
        # Nested container: named_modules() recurses into it and reports
        # "layer3.0" / "layer3.1"; named_children() stops at "layer3".
        self.layer3 = nn.Sequential(
            nn.Linear(32, 10),
            nn.ReLU(inplace=True),
        )

    def forward(self, x):
        # NOTE(review): nn.Linear acts on the LAST tensor dim, so layer3 only
        # runs if the conv output's trailing dim is 32 — this example never
        # calls forward(); it only inspects module structure.
        h = self.relu(self.conv1(x))
        h = self.relu(self.conv2(h))
        return self.layer3(h)
model = MyModel()
# named_children(): direct (depth-1) submodules only.
print("named_children:")
for child_name, child in model.named_children():
    print(child_name, child)
print("--------------------------------------------\n")
# named_modules(): the root plus every descendant, depth-first.
print("named_modules:")
for qualified_name, submodule in model.named_modules():
    print(qualified_name, submodule)
named_children:
conv1 Conv2d(3, 16, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
conv2 Conv2d(16, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
relu ReLU()
layer3 Sequential(
(0): Linear(in_features=32, out_features=10, bias=True)
(1): ReLU(inplace=True)
)
--------------------------------------------

named_modules:
MyModel(
(conv1): Conv2d(3, 16, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
(conv2): Conv2d(16, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
(relu): ReLU()
(layer3): Sequential(
(0): Linear(in_features=32, out_features=10, bias=True)
(1): ReLU(inplace=True)
)
)
conv1 Conv2d(3, 16, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
conv2 Conv2d(16, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
relu ReLU()
layer3 Sequential(
(0): Linear(in_features=32, out_features=10, bias=True)
(1): ReLU(inplace=True)
)
layer3.0 Linear(in_features=32, out_features=10, bias=True)
layer3.1 ReLU(inplace=True)
5 modules()
modules()函数返回模型的所有组成模块(包括模型本身),并递归包含各个子模块内部的模块。
import torch.nn as nn
class MyModel(nn.Module):
    """CNN with a Sequential tail, used to illustrate modules():
    a recursive walk over all modules, without names."""

    def __init__(self):
        super(MyModel, self).__init__()
        self.conv1 = nn.Conv2d(3, 16, kernel_size=3, stride=1, padding=1)
        self.conv2 = nn.Conv2d(16, 32, kernel_size=3, stride=1, padding=1)
        self.relu = nn.ReLU()
        # Sequential child; modules() will also yield its inner Linear/ReLU.
        self.layer3 = nn.Sequential(
            nn.Linear(32, 10),
            nn.ReLU(inplace=True),
        )

    def forward(self, x):
        # NOTE(review): layer3's Linear expects a trailing dim of 32; the
        # example only inspects structure and never calls forward().
        out = self.relu(self.conv1(x))
        out = self.relu(self.conv2(out))
        out = self.layer3(out)
        return out
model = MyModel()
# modules() walks the full module tree (root included), yielding each
# module object without a name.
for submodule in model.modules():
    print(submodule)
MyModel(
(conv1): Conv2d(3, 16, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
(conv2): Conv2d(16, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
(relu): ReLU()
(layer3): Sequential(
(0): Linear(in_features=32, out_features=10, bias=True)
(1): ReLU(inplace=True)
)
)
Conv2d(3, 16, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
Conv2d(16, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
ReLU()
Sequential(
(0): Linear(in_features=32, out_features=10, bias=True)
(1): ReLU(inplace=True)
)
Linear(in_features=32, out_features=10, bias=True)
ReLU(inplace=True)
6 named_children() and named_parameters()
import torch.nn as nn
class MyModel(nn.Module):
    """CNN whose Sequential tail mixes a Linear and a Conv2d, used to show
    per-child parameter enumeration via named_children() + named_parameters()."""

    def __init__(self):
        super().__init__()
        self.conv1 = nn.Conv2d(3, 16, kernel_size=3, stride=1, padding=1)
        self.conv2 = nn.Conv2d(16, 32, kernel_size=3, stride=1, padding=1)
        self.relu = nn.ReLU()
        # Parameters inside this container are reported with "0."/"1."
        # prefixes when enumerated from the container itself.
        self.layer3 = nn.Sequential(
            nn.Linear(32, 10),
            nn.Conv2d(10, 10, kernel_size=3, stride=1, padding=1),
        )

    def forward(self, x):
        # NOTE(review): layer3 applies a Linear (last-dim 32) followed by a
        # Conv2d — this pipeline only type-checks for specific input shapes;
        # the example inspects parameters and never calls forward().
        h = self.relu(self.conv1(x))
        h = self.relu(self.conv2(h))
        return self.layer3(h)
model = MyModel()
# For each direct child, list the parameters it owns. Names yielded by the
# child's own named_parameters() are relative to that child (no outer prefix).
for child_name, child_module in model.named_children():
    print(child_name, ":")
    for param_name, param in child_module.named_parameters():
        print(" ", param_name, param.shape)
conv1 :
weight torch.Size([16, 3, 3, 3])
bias torch.Size([16])
conv2 :
weight torch.Size([32, 16, 3, 3])
bias torch.Size([32])
relu :
layer3 :
0.weight torch.Size([10, 32])
0.bias torch.Size([10])
1.weight torch.Size([10, 10, 3, 3])
1.bias torch.Size([10])