1.pytorch中,有些函数参数中的dim指的是维度,表示在维度dim上进行函数操作。
例如:
函数softmax在指定的维度dim上做归一化,使该维度上各元素之和为1,即输出的是沿dim维的概率分布;
a=torch.rand(3,16,20)
b=F.softmax(a,dim=0)
c=F.softmax(a,dim=1)
d=F.softmax(a,dim=2)
b输出的是在dim=0维上的概率分布,b[0][5][6]+b[1][5][6]+b[2][5][6]=1;
同理,c是在dim=1维上的概率分布(固定其余下标,沿第1维的16个元素之和为1),d是在dim=2维上的概率分布(沿第2维的20个元素之和为1)。
In [1]: import torch as t
In [2]: import torch.nn.functional as F
In [4]: a=t.Tensor(3,4,5)
(注意:t.Tensor(3,4,5) 返回的是未初始化的张量,其数值不确定——这也是下面 Out[8] 中出现零散数值的原因;此处仅用于演示 dim 的作用)
In [5]: b=F.softmax(a,dim=0)
In [6]: c=F.softmax(a,dim=1)
In [7]: d=F.softmax(a,dim=2)
In [8]: a
Out[8]:
tensor([[[-0.1581, 0.0000, 0.0000, 0.0000, -0.0344],
[ 0.0000, -0.0344, 0.0000, -0.0344, 0.0000],
[-0.0344, 0.0000, -0.0344, 0.0000, -0.0344],
[ 0.0000, -0.0344, 0.0000, -0.0344, 0.0000]],
[[-0.0344, 0.0000, -0.0344, 0.0000, -0.0344],
[ 0.0000, -0.0344, 0.0000, -0.0344, 0.0000],
[-0.0344, 0.0000, -0.0344, 0.0000, -0.0344],
[ 0.0000, -0.0344, 0.0000, -0.0344, 0.0000]],
[[-0.0344, 0.0000, -0.0344, 0.0000, -0.0344],
[ 0.0000, -0.0344, 0.0000, -0.0344, 0.0000],
[-0.0344, 0.0000, -0.0344, 0.0000, -0.0344],
[ 0.0000, -0.0344, 0.0000, -0.0344, 0.0000]]])
In [9]: b
Out[9]:
tensor([[[0.3064, 0.3333, 0.3410, 0.3333, 0.3333],
[0.3333, 0.3333, 0.3333, 0.3333, 0.3333],
[0.3333, 0.3333, 0.3333, 0.3333, 0.3333],
[0.3333, 0.3333, 0.3333, 0.3333, 0.3333]],
[[0.3468, 0.3333, 0.3295, 0.3333, 0.3333],
[0.3333, 0.3333, 0.3333, 0.3333, 0.3333],
[0.3333, 0.3333, 0.3333, 0.3333, 0.3333],
[0.3333, 0.3333, 0.3333, 0.3333, 0.3333]],
[[0.3468, 0.3333, 0.3295, 0.3333, 0.3333],
[0.3333, 0.3333, 0.3333, 0.3333, 0.3333],
[0.3333, 0.3333, 0.3333, 0.3333, 0.3333],
[0.3333, 0.3333, 0.3333, 0.3333, 0.3333]]])
In [10]: b.sum()
Out[10]: tensor(20.0000)
(b 在 dim=0 上归一化,共 4×5=20 组、每组和为 1,故总和为 20)
In [11]: b[0][0][0]+b[1][0][0]+b[2][0][0]
Out[11]: tensor(1.0000)
In [12]: c.sum()
Out[12]: tensor(15.)
(c 在 dim=1 上归一化,共 3×5=15 组、每组和为 1,故总和为 15)
In [13]: c
Out[13]:
tensor([[[0.2235, 0.2543, 0.2521, 0.2543, 0.2457],
[0.2618, 0.2457, 0.2521, 0.2457, 0.2543],
[0.2529, 0.2543, 0.2436, 0.2543, 0.2457],
[0.2618, 0.2457, 0.2521, 0.2457, 0.2543]],
[[0.2457, 0.2543, 0.2457, 0.2543, 0.2457],
[0.2543, 0.2457, 0.2543, 0.2457, 0.2543],
[0.2457, 0.2543, 0.2457, 0.2543, 0.2457],
[0.2543, 0.2457, 0.2543, 0.2457, 0.2543]],
[[0.2457, 0.2543, 0.2457, 0.2543, 0.2457],
[0.2543, 0.2457, 0.2543, 0.2457, 0.2543],
[0.2457, 0.2543, 0.2457, 0.2543, 0.2457],
[0.2543, 0.2457, 0.2543, 0.2457, 0.2543]]])
In [14]: n=t.rand(3,4)
In [15]: n
Out[15]:
tensor([[0.2769, 0.3475, 0.8914, 0.6845],
[0.9251, 0.3976, 0.8690, 0.4510],
[0.8249, 0.1157, 0.3075, 0.3799]])
In [16]: m=t.argmax(n,dim=0)
In [17]: m
Out[17]: tensor([1, 1, 0, 0])
In [18]: p=t.argmax(n,dim=1)
In [19]: p
Out[19]: tensor([2, 0, 0])
In [20]: d.sum()
Out[20]: tensor(12.0000)
(d 在 dim=2 上归一化,共 3×4=12 组、每组和为 1,故总和为 12)
In [22]: d
Out[22]:
tensor([[[0.1771, 0.2075, 0.2075, 0.2075, 0.2005],
[0.2027, 0.1959, 0.2027, 0.1959, 0.2027],
[0.1972, 0.2041, 0.1972, 0.2041, 0.1972],
[0.2027, 0.1959, 0.2027, 0.1959, 0.2027]],
[[0.1972, 0.2041, 0.1972, 0.2041, 0.1972],
[0.2027, 0.1959, 0.2027, 0.1959, 0.2027],
[0.1972, 0.2041, 0.1972, 0.2041, 0.1972],
[0.2027, 0.1959, 0.2027, 0.1959, 0.2027]],
[[0.1972, 0.2041, 0.1972, 0.2041, 0.1972],
[0.2027, 0.1959, 0.2027, 0.1959, 0.2027],
[0.1972, 0.2041, 0.1972, 0.2041, 0.1972],
[0.2027, 0.1959, 0.2027, 0.1959, 0.2027]]])
In [23]: d[0][0].sum()
Out[23]: tensor(1.)