Self-attention: a minimal PyTorch implementation

from math import sqrt
import torch
import torch.nn as nn

class SA(nn.Module):
    """Single-head scaled dot-product self-attention.

    Projects the input into queries, keys and values, computes
    ``softmax(Q @ K^T / sqrt(d_k)) @ V`` per batch, and returns the
    attended values.

    Args:
        dimQ: feature dimension of the input tokens (d_model).
        dimK: shared projection dimension of queries and keys (d_k).
        dimV: projection dimension of the values and of the output (d_v).
    """

    def __init__(self, dimQ, dimK, dimV):
        super(SA, self).__init__()

        self.dimQ = dimQ
        self.dimK = dimK
        self.dimV = dimV

        # Bug fix: the original layers (dimQ->dimV, dimK->dimV, dimV->dimV)
        # were all applied to the same input of feature size dimQ, so the
        # module only worked when dimQ == dimK == dimV.  Every projection
        # must consume dimQ features; Q and K share the output size dimK so
        # their dot product is defined, and V projects to dimV.
        self.linerQ = nn.Linear(dimQ, dimK, bias=False)
        self.linerK = nn.Linear(dimQ, dimK, bias=False)
        self.linerV = nn.Linear(dimQ, dimV, bias=False)

        # Scale by 1/sqrt(d_k) -- the *key* dimension, per "Attention Is
        # All You Need".  The original divided by sqrt(dimQ), which is only
        # correct when dimQ == dimK.
        self.sqrtD = 1 / sqrt(dimK)

    def forward(self, x):
        """Apply self-attention.

        Args:
            x: tensor of shape (batch, n, dimQ).

        Returns:
            Tensor of shape (batch, n, dimV).

        Raises:
            ValueError: if the last dimension of ``x`` is not ``dimQ``.
        """
        batch, n, dim = x.shape

        # Validate explicitly instead of `assert`, which is stripped
        # under `python -O`.
        if dim != self.dimQ:
            raise ValueError(
                f"expected input feature size {self.dimQ}, got {dim}"
            )

        Q = self.linerQ(x)   # (batch, n, dimK)
        K = self.linerK(x)   # (batch, n, dimK)
        V = self.linerV(x)   # (batch, n, dimV)

        # Scaled attention logits: (batch, n, n).
        dist = torch.bmm(Q, K.transpose(1, 2)) * self.sqrtD
        # Softmax over the key axis so each query's weights sum to 1.
        W = torch.softmax(dist, dim=-1)

        Output = torch.bmm(W, V)  # (batch, n, dimV)
        return Output

if __name__ == "__main__":
    # Two identical batches of four 3-dimensional token embeddings:
    # rows [1,2,3], [2,3,4], [3,4,5], [4,5,6].
    rows = [[float(start + offset) for offset in range(3)] for start in range(1, 5)]
    x = torch.tensor([rows, rows])
    print(x.shape)

    model = SA(3, 3, 3)
    attended = model(x)
    print(attended)

References:
- https://zhuanlan.zhihu.com/p/338817680
- https://blog.csdn.net/weixin_44750512/article/details/124244915
- https://blog.csdn.net/qq_40178291/article/details/100302375

  • 2
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 打赏
    打赏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包

打赏作者

星光技术人

你的鼓励将是我创作的最大动力

¥1 ¥2 ¥4 ¥6 ¥10 ¥20
扫码支付:¥1
获取中
扫码支付

您的余额不足,请更换扫码支付或充值

打赏作者

实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值