张量的操作
1 张量的拼接
1.1 torch.cat(input,dim)
将张量按照dim进行拼接,不会扩张维度
# torch.cat joins tensors along an EXISTING dimension; the rank does not grow.
t = torch.ones(2, 3)
t_0 = torch.cat((t, t), dim=0)    # rows stacked   -> shape (4, 3)
t_1 = torch.cat((t, t), dim=1)    # cols stacked   -> shape (2, 6)
t_2 = torch.cat((t,) * 3, dim=1)  # any count works -> shape (2, 9)
print("t_0:{} shape:{}\nt_1:{} shape:{}\nt_2:{} shape:{}".format(t_0, t_0.shape, t_1, t_1.shape, t_2, t_2.shape))
1.2 torch.stack(input,dim)
会扩张维度
如果dim不存在,创建新维度,在新维度上进行拼接
如果dim存在,将原有维度向后一维,在dim上进行拼接
# torch.stack joins tensors along a NEW dimension inserted at `dim`,
# so the result has one more dimension than the inputs.
t = torch.ones(2, 3)
t_stack = torch.stack((t, t), dim=2)      # new trailing dim -> (2, 3, 2)
t_stack2 = torch.stack((t, t, t), dim=0)  # new leading dim  -> (3, 2, 3)
print("\nt_stack:{} shape:{}\nt_stack2:{} shape:{}".format(t_stack, t_stack.shape, t_stack2,t_stack2.shape))
2 张量的切分
2.1 torch.chunk(input,chunks,dim)
将张量在dim上进行平均切分,若不能整除,最后一份张量小于其他张量
返回值为张量列表
# torch.chunk splits a tensor into `chunks` pieces along `dim`; when the
# size is not divisible, the last piece is smaller (7 -> 3, 3, 1 here).
a = torch.ones(2, 7)
list_of_tensors = torch.chunk(a, chunks=3, dim=1)
for idx, t in enumerate(list_of_tensors):
    print("第{}个张量:{}, shape is {}".format(idx+1, t, t.shape))
2.2 torch.split(input,split_or_sections,dim)
split_or_sections为int时,按照int进行切分。
split_or_sections为list时,按照list进行切分。
返回值为张量列表
# torch.split: an int gives equal-sized pieces; a list gives explicit
# piece widths along `dim` (they must sum to the dim's size).
t = torch.ones(2, 5)
list_of_tensors = torch.split(t, [2, 1, 2], dim=1)  # widths 2, 1, 2
for idx, t in enumerate(list_of_tensors):
    print("第{}个张量:{}, shape is {}".format(idx+1, t, t.shape))
3 张量索引
3.1 torch.index_select(input,dim,index)
在dim上按照index索引数据
返回值为依index索引数据拼接的张量
# torch.index_select picks the slices listed in `index` along `dim`.
# The index tensor must be integer (long) typed — a float index raises.
t = torch.randint(0, 9, size=(3, 3))
idx = torch.tensor([0, 2], dtype=torch.long)  # keep rows 0 and 2
t_select = torch.index_select(t, dim=0, index=idx)
print("t:\n{}\nt_select:\n{}".format(t, t_select))
3.2 torch.masked_select(input,mask)
mask是与input同形状的布尔类型张量
返回值为一维张量
# torch.masked_select gathers the elements where `mask` is True and
# returns them flattened as a 1-D tensor.
t = torch.randint(0, 9, size=(3, 3))
mask = t.ge(5)  # True wherever the element is >= 5
t_select = torch.masked_select(t, mask)
print("t:\n{}\nmask:\n{}\nt_select:\n{} ".format(t, mask, t_select))
4 张量的变换
4.1 torch.reshape(input,shape)
shape为新张量的维度
# torch.reshape rearranges the 8 elements into the requested shape;
# one entry of the shape may be -1 to have it inferred.
t = torch.randperm(8)  # random permutation of 0..7
t_reshape = torch.reshape(t, (2, 4))
print("t:{}\nt_reshape:\n{}".format(t, t_reshape))
4.2 torch.transpose(input,dim1,dim2)
交换张量的两个维度
torch.t():2维张量转置等价于torch.transpose(input,0,1)
# torch.transpose swaps exactly two dimensions; here dims 1 and 2,
# e.g. a (c, h, w) layout becomes (c, w, h).
t = torch.rand(2, 3, 4)
t_transpose = torch.transpose(t, dim0=1, dim1=2)  # (2, 3, 4) -> (2, 4, 3)
print("t shape:{}\nt_transpose shape: {}".format(t.shape, t_transpose.shape))
4.3 torch.squeeze(input,dim)
压缩掉长度为1的维度。当dim指定时,当且仅当指定的维度长度为1时才会被压缩
# torch.squeeze removes size-1 dimensions. Without `dim`, all of them go;
# with `dim`, only that dimension is removed, and only if its length is 1.
t = torch.rand(1, 2, 3, 1)
t_sq = torch.squeeze(t)        # (2, 3): both size-1 dims dropped
t_0 = torch.squeeze(t, dim=0)  # (2, 3, 1): dim 0 has size 1, dropped
t_1 = torch.squeeze(t, dim=1)  # (1, 2, 3, 1): dim 1 has size 2, unchanged
for demo in (t, t_sq, t_0, t_1):
    print(demo.shape)
4.4 torch.unsqueeze(input,dim)
依据dim扩展维度
4.5 torch.add(input,other,alpha=1)
逐元素计算 input + alpha × other(先将other乘以系数alpha,再与input相加)
# torch.add computes input + alpha * other element-wise.
# Fixed: the legacy positional form torch.add(input, value, other) was
# deprecated and removed; `alpha` is keyword-only in current PyTorch,
# so torch.add(t_0, 10, t_1) would raise a TypeError.
t_0 = torch.randn(3, 3)
t_1 = torch.ones_like(t_0)  # same shape/dtype as t_0, filled with ones
t_add = torch.add(t_0, t_1, alpha=10)  # t_0 + 10 * t_1
print("t_0:\n{}\nt_1:\n{}\nt_add_10:\n{}".format(t_0, t_1, t_add))