# 索引和切片
# 具体含义查看文档,但是需要知道下面的式子的含义
# --- Indexing and slicing ---
# a is a dummy image batch: (batch=4, channels=3, height=28, width=28).
a = torch.rand(4, 3, 28, 28)
print(a)
print(a.shape)
print(a[0])  # first element along dim 0 -> shape (3, 28, 28)
print(a[0].shape)
print(a[0, 0])  # first channel of the first image -> shape (28, 28)
print(a[0, 0].shape)
print(a[0, 0, 0, 4])  # a single scalar: image 0, channel 0, row 0, column 4
print(a[:2])  # first two images
print(a[:2, :1, :, :])  # first two images, first channel only
print(a[:2, 1:, :, :])  # first two images, channels 1..end
print(a[:, :, 0:28:2, 0:28:2])  # every other row/column; stop index 28 is exclusive
print(a[:, :, ::2, ::2])  # same result with implicit start/stop
print(a.index_select(0, torch.tensor([0, 2])))  # pick images 0 and 2 along dim 0
# Fixed: the original selected along dim 0 (size 4) with indices 0..27, which
# is out of range.  Indices 0..27 address dim 2 (height, size 28).
print(a.index_select(2, torch.arange(28)))
print(a[...])  # `...` stands for all remaining dimensions
# 变换
# reshape 和 view:建议使用 reshape
# expand 与 repeat:建议使用 expand
# view and reshape are interchangeable here (reshape is the recommended one);
# both reinterpret the same elements under a new shape, so the total element
# count must stay identical.
a = torch.rand(2, 2, 3)
print(a)
# Merge the trailing two dims into one: (2, 2, 3) -> (2, 6).  Such reshapes
# normally carry a physical meaning (e.g. flattening features per sample).
merged_tail = a.view(2, 2 * 3)
print(merged_tail)
# Merge the leading two dims instead: (2, 2, 3) -> (4, 3).
merged_head = a.view(2 * 2, 3)
print(merged_head)
# squeeze removes size-1 dims; unsqueeze inserts a new size-1 dim.
b = torch.rand(4, 1, 28, 28)
# unsqueeze(pos) inserts a dim at `pos`; valid pos is [-dim()-1, dim()+1) = [-5, 5).
print(b.unsqueeze(0).shape)   # torch.Size([1, 4, 1, 28, 28]) — new leading dim
print(b.unsqueeze(1).shape)   # torch.Size([4, 1, 1, 28, 28]) — new dim at index 1
print(b.unsqueeze(-1).shape)  # torch.Size([4, 1, 28, 28, 1]) — new trailing dim
c = torch.rand(1, 32, 1, 1)
print(c.squeeze().shape)      # torch.Size([32]) — drops every size-1 dim
print(c.squeeze(0).shape)     # torch.Size([32, 1, 1])
print(c.squeeze(1).shape)     # torch.Size([1, 32, 1, 1]) — dim 1 is 32, so nothing happens
# expand grows size-1 dims lazily (no copy, unlike repeat); the number of dims
# must stay the same, and only size-1 dims can be broadcast to a larger size.
# NOTE(review): `d` is unused here — presumably the intended expand_as target;
# kept to preserve the original behavior (it advances the RNG state).
d = torch.rand(4, 32, 14, 14)
print(c.expand(4, 32, 4, 4).shape)
# 属性统计
# --- Statistics: max along a dimension ---
b = torch.rand(4, 10)
print(b)
# max(dim=1) returns a (values, indices) pair: the row-wise maximum and the
# column where it occurs.  Fixed: the original pasted the REPL output as bare
# `tensor(...)` lines, which raise NameError when the file is run; the sample
# output is kept below as comments, and the discarded expressions are printed.
# e.g. values:  tensor([0.8821, 0.9593, 0.8700, 0.9954])
#      indices: tensor([5, 6, 6, 0])
print(b.max(dim=1))
# keepdim=True keeps the reduced dim with size 1: shapes (4, 1) instead of (4,)
print(b.max(dim=1, keepdim=True))
# --- topk: the k largest entries along a dimension ---
c = torch.rand(2, 3, 4)
# topk(2, dim=1) returns the 2 largest values along dim 1 plus their indices;
# both outputs have shape (2, 2, 4).  Fixed: the original pasted the REPL
# output as bare `tensor(...)` lines (NameError at run time) and discarded
# the topk result; the sample is kept as a comment.
# e.g. values[0] = [[0.9291, 0.5020, 0.6345, 0.8347],
#                   [0.5938, 0.2319, 0.4639, 0.6023]]
print(c.topk(2, dim=1))
# --- kthvalue: the k-th smallest entry along a dimension ---
c = torch.rand(2, 3, 4)
# Fixed: the original called `d.kthvalue()` — `d` was a leftover name from an
# earlier section, and kthvalue() requires k.  The pasted outputs match
# c.kthvalue(1): the smallest value along the last dim, values shape (2, 3).
print(c.kthvalue(1))
# k-th smallest along dim 1 instead -> values shape (2, 4)
print(c.kthvalue(1, dim=1))
# --- Element-wise comparisons ---
# Fixed: `e` and `f` were never defined in the original notes (NameError at
# run time); the pasted outputs imply two (2, 3) float tensors.
e = torch.rand(2, 3)
f = torch.rand(2, 3)
print(e > 0)          # rand() draws from [0, 1), so this is True except for exact zeros
print(torch.gt(e, f)) # element-wise e > f
print(torch.eq(e, f)) # element-wise equality; almost surely all False for random floats
print(torch.gt(e, f)) # (the original notes repeated this comparison)
# --- torch.where: element-wise select ---
# where(cond, a, b) takes a's entry wherever cond is True, else b's entry.
# Fixed: the bare `Where` heading and the pasted `tensor(...)` output were
# executable NameErrors; both are comments now.
cond = torch.rand(2, 2)
print(cond)
a = torch.rand(2, 2) * 5   # candidate values scaled to [0, 5)
print(a)
b = torch.rand(2, 2) * 10  # alternative values scaled to [0, 10)
print(b)
c = torch.where(cond > 0.5, a, b)
print(c)
# Example run:
# cond = [[0.9940, 0.8014], [0.2521, 0.4505]]
# c    = [[4.0303, 0.6598], [9.2054, 4.6244]]  -> a where cond > 0.5, else b