PyTorch implementations of activation functions commonly used in object detection

import torch
import torch.nn as nn
import torch.nn.functional as F

#-------------------------------------------------#
#   Mish activation function
#-------------------------------------------------#
class Mish(nn.Module):
    def __init__(self):
        super(Mish, self).__init__()

    def forward(self, x):
        return x * torch.tanh(F.softplus(x))
#-------------------------------------------------#
#   SiLU activation function
#   Reference: https://arxiv.org/pdf/1606.08415.pdf
#-------------------------------------------------#
class SiLU(nn.Module):  # export-friendly version of nn.SiLU()
    def __init__(self):
        super(SiLU, self).__init__()
    def forward(self, x):
        return x * torch.sigmoid(x)

#-------------------------------------------------#
#   Hardswish activation function
#-------------------------------------------------#
class Hardswish(nn.Module):  # export-friendly version of nn.Hardswish()
    def __init__(self):
        super(Hardswish, self).__init__()
    def forward(self, x):
        # return x * F.hardsigmoid(x)  # for TorchScript and CoreML
        return x * F.hardtanh(x + 3, 0.0, 6.0) / 6.0  # for TorchScript, CoreML and ONNX
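These classes are drop-in replacements for the built-in activations, which matters when exporting a detector to TorchScript, ONNX, or CoreML. The sketch below shows one way to plug them into a Conv-BN-Act block; it assumes the classes above are in scope, and the ConvBNAct name and its parameters are illustrative only, not part of the original code.

# Usage sketch (assumes Mish / SiLU / Hardswish above are defined in this file).
# ConvBNAct is a hypothetical block name used only for illustration.
class ConvBNAct(nn.Module):
    """Conv -> BatchNorm -> activation, the pattern these activations slot into."""
    def __init__(self, c_in, c_out, k=3, s=1, act=None):
        super().__init__()
        self.conv = nn.Conv2d(c_in, c_out, k, s, padding=k // 2, bias=False)
        self.bn = nn.BatchNorm2d(c_out)
        self.act = act if act is not None else SiLU()

    def forward(self, x):
        return self.act(self.bn(self.conv(x)))

if __name__ == '__main__':
    x = torch.randn(1, 16, 32, 32)
    for act in (Mish(), SiLU(), Hardswish()):
        print(type(act).__name__, ConvBNAct(16, 32, act=act)(x).shape)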

SMU activation function

# coding=utf-8

import torch
from torch import nn
from matplotlib import pyplot as plt

# alpha controls the slope of the negative half (x < 0); smaller alpha gives a flatter negative branch.
# mu controls how smoothly the curve bends in the small region around x = 0.
# With alpha = 0 and mu -> infinity, the curve approaches ReLU (a numeric check of this limit follows after this file).

class SMU(nn.Module):
    '''
    Implementation of SMU activation.
    Shape:
        - Input: (N, *) where * means any number of additional
          dimensions
        - Output: (N, *), same shape as the input
    Parameters:
        - alpha: hyperparameter (fixed, default 0.25)
        - mu: learnable smoothing parameter (default 1e5)
    References:
        - See related paper:
        https://arxiv.org/abs/2111.04682
    Examples:
        smu = SMU()
        x = torch.Tensor([0.6,-0.3])
        x = smu(x)
    '''
    def __init__(self, alpha = 0.25, mu=100000):
        '''
        Initialization.
        INPUT:
            - alpha: hyperparameter, fixed (default 0.25)
            - mu: initial value of the learnable smoothing parameter (default 1e5)
        '''
        super(SMU,self).__init__()
        self.alpha = alpha
        # mu is registered as a learnable parameter
        self.mu = torch.nn.Parameter(torch.tensor(float(mu)))
        
    def forward(self, x):
        return ((1+self.alpha)*x + (1-self.alpha)*x*torch.erf(self.mu*(1-self.alpha)*x))/2

class SMU1(nn.Module):
    '''
    Implementation of SMU-1 activation.
    Shape:
        - Input: (N, *) where * means any number of additional
          dimensions
        - Output: (N, *), same shape as the input
    Parameters:
        - alpha: hyperparameter (fixed, default 0.25)
    References:
        - See related paper:
        https://arxiv.org/abs/2111.04682
    Examples:
        smu1 = SMU1()
        x = torch.Tensor([0.6,-0.3])
        x = smu1(x)
    '''
    def __init__(self, alpha = 0.25):
        '''
        Initialization.
        INPUT:
            - alpha: hyperparameter, fixed (default 0.25)
        '''
        super(SMU1,self).__init__()
        self.alpha = alpha
        # mu is learnable; it starts near zero, so SMU-1 initially behaves like LeakyReLU(alpha)
        self.mu = torch.nn.Parameter(torch.tensor(4.352665993287951e-9))
        
    def forward(self, x):
        return ((1+self.alpha)*x+torch.sqrt(torch.square(x-self.alpha*x)+torch.square(self.mu)))/2

def test_SMU(x):
    smu_activation = SMU()
    print(f'SMU = {smu_activation(x)}')
    
def test_SMU1(x):
    smu1_activation = SMU1()
    print(f'SMU1 = {smu1_activation(x)}')

def test():
    x = torch.Tensor([0.6,-0.3])
    test_SMU(x)
    test_SMU1(x)

if __name__ == '__main__':
    test()
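
To make the comments at the top of this file concrete: with alpha = 0 and a large mu, erf(mu * x) saturates to sign(x), so SMU(x) reduces to (x + |x|) / 2 = ReLU(x); with the tiny initial mu, SMU-1 reduces to LeakyReLU(alpha). The snippet below is a small numeric check of those limits, not part of the original post; it only assumes the SMU and SMU1 classes defined above.

# Sanity check (a sketch, not from the original post).
import torch.nn.functional as F

def check_limits():
    x = torch.linspace(-3, 3, steps=101)

    # SMU with alpha = 0 and the default large mu should match ReLU closely.
    smu = SMU(alpha=0.0, mu=100000)
    print('max |SMU  - ReLU|      :', (smu(x) - F.relu(x)).abs().max().item())

    # SMU-1 starts with a near-zero mu, so it should match LeakyReLU(alpha) closely.
    smu1 = SMU1(alpha=0.25)
    print('max |SMU1 - LeakyReLU| :', (smu1(x) - F.leaky_relu(x, 0.25)).abs().max().item())

if __name__ == '__main__':
    check_limits()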

An adaptive activation function is currently being implemented; experiments are in progress.
