# Demo: torch.nn.Softmax vs. a manual softmax computation on the same data.
# Softmax(dim=0) normalises along dimension 0; for a 1-D tensor that is the
# only dimension, so the whole vector is turned into a probability
# distribution: softmax(x)_i = exp(x_i) / sum_j exp(x_j).
import torch as t
import torch.nn as nn
import math
import numpy  # kept from original file; may be used by code outside this view

# Renamed from `input` — the original name shadowed the builtin input().
logits = t.tensor([1, 3, 3, 4, 5, 6], dtype=t.float32)
print(logits)

m = nn.Softmax(dim=0)
output = m(logits)
print(output)

# Manual re-computation of the same softmax, step by step.
# (An unused leftover list `z` from the original example was removed:
# the computation below iterates over `logits`, not `z`.)
z_exp = [math.exp(x) for x in logits]
print(z_exp)

sum_z_exp = sum(z_exp)
print(sum_z_exp)  # the softmax denominator: sum of all exponentials

# Each probability rounded to 3 decimal places; matches `output` above.
softmax = [round(e / sum_z_exp, 3) for e in z_exp]
print(softmax)
# [Note] How softmax is computed: `dim` specifies the dimension along which
# the softmax of the data is taken.
# (Original article last published/recommended 2024-06-21 12:33:31.)