import torch
from .efficientnet_pytorch2.model import EfficientNet
import torch.nn as nn
from torch.nn import functional as F
import warnings
from .aspp import ASPP, DenseASPP
from torchvision import models
from .ocrnet import ContextAttention
from .da_attention import CAM_Module, PAM_Module
from .attention import PSAModule
from .aligend import CAB_FFM
from .aligend import CAB4 as CAB
from .aligend import CAB_low, CAB_high, CAB_high2
warnings.filterwarnings(action='ignore')
torch.backends.cudnn.benchmark = True
class resnet101(torch.nn.Module):
    def __init__(self, pretrained=True):
        super().__init__()
        self.features = models.resnet101(pretrained=pretrained)
        self.conv1 = self.features.conv1
        self.bn1 = self.features.bn1
        self.relu = self.features.relu
        self.maxpool1 = self.features.maxpool
        self.layer1 = self.features.layer1
        self.layer2 = self.features.layer2
        self.layer3 = self.features.layer3
        self.layer4 = self.features.layer4

    def forward(self, input):
        x = self.conv1(input)
        x = self.relu(self.bn1(x))
        x = self.maxpool1(x)
        feature1 = self.layer1(x)         # 1 / 4
        feature2 = self.layer2(feature1)  # 1 / 8
        feature3 = self.layer3(feature2)  # 1 / 16
        feature4 = self.layer4(feature3)  # 1 / 32
        # global average pooling to build tail
        # (note: tail is computed but not returned below)
        tail = torch.mean(feature4, 3, keepdim=True)
        tail = torch.mean(tail, 2, keepdim=True)
        return feature1, feature2, feature3, feature4
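# A minimal usage sketch (illustrative, not part of the original file):
# the backbone returns four feature maps at strides 4/8/16/32, so for a
# 224x224 input the shapes would be:
#
#   net = resnet101(pretrained=False)
#   f1, f2, f3, f4 = net(torch.randn(1, 3, 224, 224))
#   # f1: (1, 256, 56, 56)   f2: (1, 512, 28, 28)
#   # f3: (1, 1024, 14, 14)  f4: (1, 2048, 7, 7)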
class ConvBNReLU(nn.Module):
    def __init__(self, in_chan, out_chan, ks=3, stride=1, padding=1, *args, **kwargs):
        super(ConvBNReLU, self).__init__()
        self.conv = nn.Conv2d(in_chan,
                              out_chan,
                              kernel_size=ks,
                              stride=stride,
                              padding=padding,
                              bias=False)
        # self.bn = BatchNorm2d(out_chan)
        # self.bn = BatchNorm2d(out_chan, activation='none')
        self.relu = nn.ReLU()
        self.init_weight()

    def forward(self, x):
        x = self.conv(x)
        # x = self.bn(x)
        x = self.relu(x)
        return x

    def init_weight(self):
        # Kaiming-normal init for all conv layers; zero any biases.
        for ly in self.children():
            if isinstance(ly, nn.Conv2d):
                nn.init.kaiming_normal_(ly.weight, a=1)
                if ly.bias is not None:
                    nn.init.constant_(ly.bias, 0)
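# A small usage sketch (illustrative only): with the BatchNorm lines
# commented out above, ConvBNReLU acts as a plain conv + ReLU block.
#
#   block = ConvBNReLU(64, 128, ks=3, stride=2, padding=1)
#   y = block(torch.randn(1, 64, 32, 32))  # y: (1, 128, 16, 16)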
class BiSeNetOutput(nn.Module):
    def __init__