from collections import OrderedDict

import torch.nn as nn
import torch.nn.functional as F
from torch.nn import Conv2d
class net1(nn.Module):
    """Two 1x1 convolutions registered as direct attributes.

    Registering layers as plain attributes makes each one a named
    child module ('conv1', 'conv2') of this Module.
    """
    def __init__(self):
        super(net1, self).__init__()
        self.conv1 = Conv2d(3, 3, 1)
        self.conv2 = Conv2d(3, 3, 1)

    def forward(self, x):
        # conv2 runs first, then conv1 — same composition as conv1(conv2(x)).
        out = self.conv2(x)
        out = self.conv1(out)
        return out
# Instantiate and print the module tree.
# NOTE(review): this rebinds the name 'net1' from the class to the
# instance — the class is no longer reachable under that name.
net1 = net1()
print(net1)
class net2(nn.Module):
    """Two 1x1 convolutions wrapped in an nn.Sequential.

    The convs get auto-generated child names '0' and '1' under
    ``self.layer``.
    """
    def __init__(self):
        super(net2, self).__init__()
        self.layer = nn.Sequential(
            Conv2d(3, 3, 1),
            Conv2d(3, 3, 1)
        )

    def forward(self, x):
        # Bug fix: the convs live inside self.layer, not as direct
        # attributes — self.conv1/self.conv2 raised AttributeError.
        # nn.Sequential applies its children in registration order.
        return self.layer(x)
# Instantiate and print; the Sequential's children show as '0' and '1'.
# NOTE(review): rebinds 'net2' from the class to the instance.
net2 = net2()
print(net2)
class net3(nn.Module):
    """nn.Sequential built from an OrderedDict to give the children
    explicit names ('conv1', 'conv2') instead of '0'/'1'."""
    def __init__(self):
        super(net3, self).__init__()
        self.layer = nn.Sequential(OrderedDict([
            ('conv1', Conv2d(3, 3, 1)),
            ('conv2', Conv2d(3, 3, 1))
        ]))

    def forward(self, x):
        # Bug fix: the convs are children of self.layer, so
        # self.conv1/self.conv2 raised AttributeError. Sequential
        # applies them in insertion order.
        return self.layer(x)
# Instantiate and print; children appear under their OrderedDict keys.
# NOTE(review): rebinds 'net3' from the class to the instance.
net3 = net3()
print(net3)
class net4(nn.Module):
    """nn.Sequential populated via add_module(), which also lets you
    pick explicit child names ('conv1', 'conv2')."""
    def __init__(self):
        super(net4, self).__init__()
        self.layer = nn.Sequential()  # nn.ModuleDict(), nn.ModuleList()
        self.layer.add_module('conv1', Conv2d(3, 3, 3))
        self.layer.add_module('conv2', Conv2d(3, 3, 3))

    def forward(self, x):
        # Bug fix: the convs are registered on self.layer, so
        # self.conv1/self.conv2 raised AttributeError. Sequential runs
        # them in add_module order.
        return self.layer(x)
# Instantiate and print the add_module-built Sequential.
# NOTE(review): rebinds 'net4' from the class to the instance.
net4 = net4()
print(net4)
class net5(nn.Module):
    """Two 3x3 convolutions held in an nn.ModuleList.

    Unlike nn.Sequential, a ModuleList only registers its entries; it
    has no forward() of its own, so the caller must iterate it.
    """
    def __init__(self):
        super(net5, self).__init__()
        self.layer = nn.ModuleList([Conv2d(3, 3, 3), Conv2d(3, 3, 3)])

    def forward(self, x):
        # Bug fix: self.conv1/self.conv2 do not exist (AttributeError),
        # and nn.ModuleList is not callable — apply each entry in turn.
        for module in self.layer:
            x = module(x)
        return x
# Instantiate and print the ModuleList-based module.
# NOTE(review): rebinds 'net5' from the class to the instance.
net5 = net5()
print(net5)
class net6(nn.Module):
    """Two 3x3 convolutions held in an nn.ModuleDict, keyed by name.

    Like ModuleList, a ModuleDict only registers its entries — it has
    no forward(); entries are looked up by key.
    """
    def __init__(self):
        super(net6, self).__init__()
        self.layer = nn.ModuleDict({
            'conv1': Conv2d(3, 3, 3),
            'conv2': Conv2d(3, 3, 3)
        })

    def forward(self, x):
        # Bug fix: self.conv1/self.conv2 raised AttributeError — the
        # convs live in self.layer. Preserve the original composition
        # order: conv2 first, then conv1.
        return self.layer['conv1'](self.layer['conv2'](x))
# Instantiate and print the ModuleDict-based module.
# NOTE(review): rebinds 'net6' from the class to the instance.
net6 = net6()
print(net6)
class net7(nn.Module):
    """nn.ModuleDict built from an iterable of (name, module) pairs —
    equivalent to the dict-literal form but with guaranteed order on
    older Python versions."""
    def __init__(self):
        super(net7, self).__init__()
        self.layer = nn.ModuleDict([
            ('conv1', Conv2d(3, 3, 3)),
            ('conv2', Conv2d(3, 3, 3))
        ])

    def forward(self, x):
        # Bug fix: self.conv1/self.conv2 raised AttributeError — look
        # the convs up in self.layer, keeping conv2-then-conv1 order.
        return self.layer['conv1'](self.layer['conv2'](x))
# Instantiate and print the pair-list ModuleDict module.
# NOTE(review): rebinds 'net7' from the class to the instance.
net7 = net7()
print(net7)
class net8(nn.Module):
    """Conv + ReLU registered directly on the Module via add_module()."""
    def __init__(self):
        super(net8, self).__init__()
        # Bug fix: dropped the stray trailing commas — they built
        # throwaway 1-tuples around the (None) return of add_module.
        self.add_module('conv2', nn.Conv2d(3, 16, kernel_size=3, stride=1, padding=1))
        self.add_module('relu2', nn.ReLU())

    def forward(self, x):
        # Bug fix: super().forward() hits the abstract nn.Module.forward
        # and raises NotImplementedError. Apply the registered children
        # in registration order instead.
        for module in self.children():
            x = module(x)
        return x
# Instantiate and print the add_module-registered children.
# NOTE(review): rebinds 'net8' from the class to the instance; the class
# is redefined again below anyway.
net8 = net8()
print(net8)
class net8(nn.Module):
    """Conv + ReLU with optional dropout.

    Args:
        drop_rate: Dropout probability; 0 disables dropout.
            Defaults to 0.0 (backward-compatible: existing positional
            callers are unaffected).
    """
    def __init__(self, drop_rate=0.0):
        super(net8, self).__init__()
        self.add_module('conv2', nn.Conv2d(3, 16, kernel_size=3, stride=1, padding=1))
        self.add_module('relu2', nn.ReLU())
        # Plain float attribute — not a child module, so it is not
        # visited by self.children() below.
        self.drop_rate = drop_rate

    def forward(self, x):
        # Bug fix: super().forward() hits the abstract nn.Module.forward
        # and raises NotImplementedError. Run the registered children
        # in order, then apply functional dropout if enabled.
        for module in self.children():
            x = module(x)
        if self.drop_rate > 0:
            # F.dropout is a no-op in eval mode (training=False).
            x = F.dropout(x, p=self.drop_rate, training=self.training)
        return x
# Bug fix: the constructor's drop_rate parameter had no default, so the
# bare net8() call raised TypeError. Pass an explicit dropout rate.
# NOTE(review): rebinds 'net8' from the class to the instance.
net8 = net8(drop_rate=0.5)
print(net8)