GAM_attention
A PyTorch implementation of GAM (Global Attention Mechanism) attention, which gates a feature map with channel attention followed by spatial attention.
import torch
import torch.nn as nn

class GAM_Attention(nn.Module):
    def __init__(self, in_channels, out_channels, rate=4):
        super(GAM_Attention, self).__init__()
        # Channel attention: a two-layer MLP with bottleneck ratio `rate`
        self.channel_attention = nn.Sequential(
            nn.Linear(in_channels, in_channels // rate),
            nn.ReLU(inplace=True),
            nn.Linear(in_channels // rate, in_channels),
        )
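
The snippet above breaks off inside the channel-attention block, so a complete, runnable sketch of the whole module is given below. It follows the reference design from the GAM paper (Liu et al., 2021, arXiv:2112.05561): a per-location channel MLP followed by a 7x7 convolutional spatial gate, with sigmoid gating on both branches as in the paper's equations. The sizes in the __main__ check are arbitrary.

import torch
import torch.nn as nn

class GAM_Attention(nn.Module):
    def __init__(self, in_channels, out_channels, rate=4):
        super(GAM_Attention, self).__init__()
        # Channel attention: a two-layer MLP over the channel dimension
        self.channel_attention = nn.Sequential(
            nn.Linear(in_channels, in_channels // rate),
            nn.ReLU(inplace=True),
            nn.Linear(in_channels // rate, in_channels),
        )
        # Spatial attention: two 7x7 convolutions with a channel bottleneck
        self.spatial_attention = nn.Sequential(
            nn.Conv2d(in_channels, in_channels // rate, kernel_size=7, padding=3),
            nn.BatchNorm2d(in_channels // rate),
            nn.ReLU(inplace=True),
            nn.Conv2d(in_channels // rate, out_channels, kernel_size=7, padding=3),
            nn.BatchNorm2d(out_channels),
        )

    def forward(self, x):
        b, c, h, w = x.shape
        # Reshape (B, C, H, W) -> (B, H*W, C) so the MLP acts on channels
        x_permute = x.permute(0, 2, 3, 1).view(b, -1, c)
        x_att_permute = self.channel_attention(x_permute).view(b, h, w, c)
        x_channel_att = x_att_permute.permute(0, 3, 1, 2).sigmoid()
        x = x * x_channel_att
        # Spatial gate, applied to the channel-attended features
        x_spatial_att = self.spatial_attention(x).sigmoid()
        return x * x_spatial_att

if __name__ == "__main__":
    # Shape check; in_channels and out_channels are typically equal
    x = torch.randn(2, 64, 32, 32)
    attn = GAM_Attention(in_channels=64, out_channels=64)
    print(attn(x).shape)  # torch.Size([2, 64, 32, 32])

Note that, unlike CBAM, the channel MLP here is applied per spatial location via the permute/reshape rather than after global pooling, which is how GAM retains cross-dimension information.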