view/reshape
In [13]: a=torch.rand(4,1,28,28)
In [14]: a.shape
Out[14]: torch.Size([4, 1, 28, 28])
In [15]: a.view(4, 28*28)
Out[15]: tensor([[0.9020, 0.5015, 0.1010, ..., 0.4946, 0.1665, 0.5352],
[0.3386, 0.5002, 0.7858, ..., 0.1595, 0.9348, 0.3489],
[0.6014, 0.9255, 0.4829, ..., 0.4963, 0.1805, 0.7925],
[0.8898, 0.2387, 0.7560, ..., 0.1687, 0.3041, 0.3767]])
//view()变换前后元素总数(numel)必须相同,而非size相同
//此处合并了后三个维度(1, 28, 28)
In [16]: a.view(4,28*28).shape
Out[16]: torch.Size([4, 784])
In [17]: a.view(4*28, 28).shape
Out[17]: torch.Size([112, 28])
//此处合并了前三个维度(4, 1, 28)
In [18]: a.view(4*1, 28, 28).shape
Out[18]: torch.Size([4, 28, 28])
//此处合并了前两个维度(4, 1)
In [19]: b=a.view(4, 784)
In [20]: b.view(4, 28, 28, 1) # 逻辑错误:numel相同所以view不会报错,但维度顺序已不对应原数据
//b丢失了原来a存储的维度信息(4, 1, 28, 28),恢复时应使用b.view(4, 1, 28, 28)
squeeze/unsqueeze
unsqueeze
In [34]: a.shape
Out[34]: torch.Size([4, 1, 28, 28])
In [35]: a.unsqueeze(0).shape
Out[35]: torch.Size([1, 4, 1, 28, 28])
//在0索引前插入,即第一个
In [36]: a.unsqueeze(-1).shape
Out[36]: torch.Size([4, 1, 28, 28, 1])
//在-1索引后插入,即最后一个
//正数就是在之前插入,负数就是在之后插入
In [37]: a.unsqueeze(4).shape
Out[37]: torch.Size([4, 1, 28, 28, 1])
In [38]: a.unsqueeze(-4).shape
Out[38]: torch.Size([4, 1, 1, 28, 28])
In [39]: a.unsqueeze(-5).shape
Out[39]: torch.Size([1, 4, 1, 28, 28])
In [40]: a.unsqueeze(5).shape
RuntimeError: Dimension out of range
(expected to be in range of [-5, 4], but got 5)
In [46]: a=torch.tensor([1.2,2.3])
In [47]: a.unsqueeze(-1)
Out[47]: tensor([[1.2000], [2.3000]])
In [49]: a.unsqueeze(0)
Out[49]: tensor([[1.2000, 2.3000]])
squeeze
In [60]: b.shape
Out[60]: torch.Size([1, 32, 1, 1])
In [61]: b.squeeze().shape
Out[61]: torch.Size([32])
In [62]: b.squeeze(0).shape
Out[62]: torch.Size([32, 1, 1])
In [63]: b.squeeze(-1).shape
Out[63]: torch.Size([1, 32, 1])
In [64]: b.squeeze(1).shape
Out[64]: torch.Size([1, 32, 1, 1])
//dim 1的大小为32≠1,squeeze不起作用,形状不变
In [65]: b.squeeze(-4).shape
Out[65]: torch.Size([32, 1, 1])
expand/repeat
expand
只扩展视图,不拷贝数据
使用前后维度数(dim的个数)需相同,且原张量中需要扩展的那些维度大小必须为1
In [68]: a=torch.rand(4, 32, 14, 14)
In [73]: b.shape
Out[73]: torch.Size([1, 32, 1, 1])
In [70]: b.expand(4,32,14,14).shape
Out[70]: torch.Size([4, 32, 14, 14])
//expand()的参数为扩展后的目标形状
In [72]: b.expand(-1,32,-1,-1).shape
Out[72]: torch.Size([1, 32, 1, 1])
//-1为希望该维度的张量保持原状
In [71]: b.expand(-1,32,-1,-4).shape
Out[71]: torch.Size([1, 32, 1, -4])
//-4为非法输入,旧版本会原样写入-4但没有实质含义(注:新版本PyTorch会直接报错)
repeat
扩展,而且有数据,但与之前的相同
In [74]: b.shape
Out[74]: torch.Size([1, 32, 1, 1])
In [75]: b.repeat(4,32,1,1).shape
Out[75]: torch.Size([4, 1024, 1, 1])
//repeat()内参数为各维度拷贝的次数,dim 1原大小为32,拷贝32次得到32*32=1024
In [76]: b.repeat(4,1,1,1).shape
Out[76]: torch.Size([4, 32, 1, 1])
In [77]: b.repeat(4,1,32,32).shape
Out[77]: torch.Size([4, 32, 32, 32])
.t()
将矩阵转置(仅适用于2维张量,高维张量需用transpose/permute)
transpose(dim0, dim1)
将目标的两个指定维度进行交换