"""Demo of PyTorch tensor manipulation ops: cat/stack, chunk/split,
index_select/masked_select, transpose/t, and ones_like/add."""
import torch
import numpy as np

# 1. Tensor concatenation along an existing dimension
t = torch.ones((2, 3))
t_0 = torch.cat([t, t], dim=0)  # shape (4, 3)
t_1 = torch.cat([t, t], dim=1)  # shape (2, 6)

# 2. Tensor concatenation that creates a new dimension
# torch.stack
t = torch.ones((2, 3))
t_stack = torch.stack([t, t], dim=0)  # shape (2, 2, 3)

# 3. chunk: split a tensor into (up to) `chunks` pieces along dim;
#    the last piece may be smaller when the size is not divisible.
a = torch.ones((2, 7))
list_of_tensor = torch.chunk(a, dim=1, chunks=3)  # widths 3, 3, 1
for idx, t in enumerate(list_of_tensor):
    print("idx:{},tensor:{}".format(idx, t))

# 4. split: slice dim 1 into the given section widths (must sum to 7 here)
a = torch.ones((2, 7))
list_of_tensor = torch.split(a, [2, 4, 1], dim=1)
for idx, t in enumerate(list_of_tensor):
    print("idx:{},tensor:{}".format(idx, t))

# 5. Select columns by index
t = torch.randint(0, 9, size=(3, 3))
print(t)
idx = torch.tensor([0, 2], dtype=torch.long)  # index_select requires long indices
print(idx)
t_select = torch.index_select(t, dim=1, index=idx)
print(t_select)

# 6. Mask selection: ge() compares element-wise by value;
#    masked_select always returns a 1-D tensor.
t = torch.randint(0, 9, size=(3, 3))
print(t)
mask = t.ge(5)  # True where t >= 5
print(mask)
t_select = torch.masked_select(t, mask)
print(t_select)

# 7. transpose: swap two dimensions (not limited to 2-D transposition)
t = torch.rand((2, 3, 4))
# print(t)
t_transpose = torch.transpose(t, dim0=1, dim1=2)  # shape (2, 4, 3)
# print(t_transpose)
# 2-D matrix transpose shortcut
t_test = torch.rand(3, 4)
t_trans = torch.t(t_test)
print(t_test)
print(t_trans)
# torch.squeeze() removes dimensions of length 1
# torch.unsqueeze() expands the length of a dim

# 8. ones_like: all-ones tensor with the same shape as the input
t_0 = torch.randn((3, 3))
t_1 = torch.ones_like(t_0)
print(t_0)
print(t_1)
t_add = torch.add(t_0, t_1, alpha=10)  # computes t_0 + 10 * t_1
print(t_add)
# FIX: use detach() instead of the deprecated .data attribute — .data
# bypasses autograd tracking and can silently corrupt gradient history.
# Printed output is identical (same tensor values).
print(t_add.detach())
print(t_add.detach().numpy())