# 1. softmax — normalizes a vector of scores into a probability distribution.
import torch
import torch.nn.functional as F

x1 = torch.Tensor([[1, 2, 3, 4], [1, 3, 4, 5], [3, 4, 5, 6]])
y11 = F.softmax(x1, dim=0)  # softmax over each column (down the rows)
y12 = F.softmax(x1, dim=1)  # softmax over each row (fix: was undefined `x`)
x2 = torch.Tensor([-1, 2, 3, 4])
x3 = torch.Tensor([1, 2, 3, 4])
y2 = F.softmax(x2, dim=0)
y3 = F.softmax(x3, dim=0)
print(y2)
print(y3)
# 2. sigmoid — element-wise 1 / (1 + exp(-x)), squashes values into (0, 1).
a = torch.ones([2, 3])
print(a.shape)
print(a)
b = torch.sigmoid(a)
print(b)
# 3. relu — element-wise y = max(0, x): negatives clamp to 0, positives pass through.
# (fix: the formula was a bare statement `y = max(0,x)` with `x` undefined)
a = torch.tensor([0, 1, 0.3, -1])
b = F.relu(a)
print(b)