# 一个卷积+全连接+sigmoid+全连接 (conv + fully-connected + sigmoid + fully-connected)
# 定义模型 (define the model)
class Output_Layer(nn.Module):
    """Output head: Conv2d -> Linear -> Sigmoid -> Linear.

    Expects input of shape (batch, in_channels, nodes, time),
    e.g. torch.Size([32, 64, 307, 12]), and produces
    (batch, nodes, time, out_channels), e.g. torch.Size([32, 307, 12, 1])
    when out_channels == 1.
    """

    def __init__(self, in_channels, out_channels):
        super(Output_Layer, self).__init__()
        # A (1, 3) kernel with (0, 1) padding convolves along the last
        # (time) axis only, so the node and time dimensions are preserved.
        self.conv = nn.Conv2d(in_channels, out_channels,
                              kernel_size=(1, 3), padding=(0, 1))
        # The linear layers act on the channel axis once it has been
        # permuted to the last position in forward().
        self.fc1 = nn.Linear(out_channels, out_channels)
        self.sigmoid = nn.Sigmoid()
        self.fc2 = nn.Linear(out_channels, out_channels)

    def forward(self, x):
        """Map (B, C_in, N, T) -> (B, N, T, C_out)."""
        x = self.conv(x)           # (B, C_out, N, T)
        x = x.permute(0, 2, 3, 1)  # channels last, so nn.Linear applies per position
        x = self.fc1(x)
        x = self.sigmoid(x)
        x = self.fc2(x)
        return x
# 两个1*1卷积构成的输出模块 (output module built from two 1x1 convolutions)
class OutputModule(nn.Module):
    """Output head made of two 1x1 convolutions with ReLU activations.

    Expects input of shape (batch, in_channels, nodes, time),
    e.g. torch.Size([32, 64, 307, 12]), and returns a channels-last
    tensor (batch, nodes, time, out_channels),
    e.g. torch.Size([32, 307, 12, 1]) when out_channels == 1.
    """

    def __init__(self, in_channels, out_channels):
        super(OutputModule, self).__init__()
        # 1x1 kernels mix channels pointwise; node/time dims are untouched.
        self.conv1 = nn.Conv2d(in_channels, out_channels, kernel_size=1)
        self.conv2 = nn.Conv2d(out_channels, out_channels, kernel_size=1)
        self.relu = nn.ReLU()

    def forward(self, x):
        """Map (B, C_in, N, T) -> (B, N, T, C_out); output is non-negative."""
        hidden = self.relu(self.conv1(x))
        out = self.relu(self.conv2(hidden))
        # Move channels to the last axis before returning.
        return out.permute(0, 2, 3, 1)