import torch
import torch.nn.functional as F
def softmax(t: torch.Tensor, dim: int = 1) -> torch.Tensor:
    """Return the softmax of *t* along *dim* (default 1, as used below).

    Numerically stable: the per-row maximum is subtracted before
    exponentiating. Softmax is invariant to a constant shift per row,
    so the result is unchanged, but naive ``t.exp()`` overflows to
    ``inf`` (and the division then yields ``nan``) for inputs as small
    as ~90 in float32; the shifted form never exponentiates a value
    greater than 0.
    """
    # .max(...) returns (values, indices); we only need the values.
    shifted = t - t.max(dim=dim, keepdim=True).values
    exps = shifted.exp()
    return exps / exps.sum(dim=dim, keepdim=True)
# Demo: the hand-rolled softmax should agree with torch's built-in.
# Float literals give float32 by default, matching the explicit dtype.
logits = torch.tensor([[0.0, 1.0, 0.0], [1.0, 0.0, 0.0]])
builtin_result = F.softmax(logits, dim=1)
manual_result = softmax(logits)
print(builtin_result)
print(manual_result)