import torch as tt
tt.manual_seed(1)
<torch._C.Generator at 0x683af10>
torch.manual_seed(args.seed)  # seed the CPU RNG so results are reproducible
if args.cuda:
    torch.cuda.manual_seed(args.seed)  # seed the current GPU; with multiple GPUs, use torch.cuda.manual_seed_all() to seed them all
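Since no `args` object is defined in this file, a minimal standalone sketch of the same seeding pattern, using the `tt` alias and an arbitrary seed value, might look like this:
# Standalone seeding sketch (no args object); the seed value 1 is arbitrary
seed = 1
tt.manual_seed(seed)                  # CPU RNG
if tt.cuda.is_available():
    tt.cuda.manual_seed_all(seed)     # all visible GPUs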
# example 1
# torch.cat
flag = True
if flag:
    t = tt.ones((2, 3))
    t_0 = tt.cat([t, t], dim=0)
    t_1 = tt.cat([t, t, t], dim=1)
    print("t_0:{} shape:{}\nt_1:{} shape:{}".format(t_0, t_0.shape, t_1, t_1.shape))
t_0:tensor([[1., 1., 1.],
[1., 1., 1.],
[1., 1., 1.],
[1., 1., 1.]]) shape:torch.Size([4, 3])
t_1:tensor([[1., 1., 1., 1., 1., 1., 1., 1., 1.],
[1., 1., 1., 1., 1., 1., 1., 1., 1.]]) shape:torch.Size([2, 9])
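A quick sketch of the rule behind torch.cat: it joins tensors along an existing dimension, and all other dimensions must match (the tensor shapes here are chosen only for illustration).
# Sketch: cat joins along an existing dim; every other dim must match
a = tt.ones((2, 3))
b = tt.zeros((4, 3))                 # same size on dim 1, different on dim 0
ab = tt.cat([a, b], dim=0)           # ok -> shape (6, 3)
print(ab.shape)                      # torch.Size([6, 3])
# tt.cat([a, tt.ones((2, 4))], dim=0) would raise a RuntimeError (dim 1 mismatch)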
# example 2
# torch.stack
flag_ex2 = True
if flag_ex2:
    t = tt.ones((2, 3))
    t_stack = tt.stack([t, t, t], dim=0)
    print("\nt_stack:{} shape:{}".format(t_stack, t_stack.shape))
t_stack:tensor([[[1., 1., 1.],
[1., 1., 1.]],
[[1., 1., 1.],
[1., 1., 1.]],
[[1., 1., 1.],
[1., 1., 1.]]]) shape:torch.Size([3, 2, 3])
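The key contrast with example 1: torch.stack inserts a brand-new dimension, while torch.cat only grows an existing one. A shape-only sketch:
# Sketch: stack inserts a new dim, cat does not
t = tt.ones((2, 3))
print(tt.stack([t, t], dim=0).shape)   # torch.Size([2, 2, 3]) - new leading dim
print(tt.cat([t, t], dim=0).shape)     # torch.Size([4, 3])    - existing dim grows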
# example 3
# torch.chunk
flag_ex3 = True
if flag_ex3:
    a = tt.ones((2, 7))
    list_of_tensors = tt.chunk(a, dim=1, chunks=3)
    for idx, t in enumerate(list_of_tensors):
        print("Tensor No.{}: {}, shape is {}".format(idx + 1, t, t.shape))
Tensor No.1: tensor([[1., 1., 1.],
[1., 1., 1.]]), shape is torch.Size([2, 3])
Tensor No.2: tensor([[1., 1., 1.],
[1., 1., 1.]]), shape is torch.Size([2, 3])
Tensor No.3: tensor([[1.],
[1.]]), shape is torch.Size([2, 1])
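The uneven last chunk is expected: torch.chunk sizes each piece as ceil(total / chunks), so the final chunk takes whatever remains. A quick sketch of that behavior:
# Sketch: chunk sizes use ceil(total / chunks); the last chunk takes the remainder
a = tt.ones((2, 7))
print([c.shape[1] for c in tt.chunk(a, chunks=3, dim=1)])   # [3, 3, 1]
print([c.shape[1] for c in tt.chunk(a, chunks=4, dim=1)])   # [2, 2, 2, 1]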
# example 4
# torch.split
t = tt.ones((2, 5))
list_of_tensors = tt.split(t, [2, 1, 2], dim=1)
for idx, t in enumerate(list_of_tensors):
    print("Tensor No.{}: {}, shape is {}".format(idx + 1, t, t.shape))
Tensor No.1: tensor([[1., 1.],
[1., 1.]]), shape is torch.Size([2, 2])
Tensor No.2: tensor([[1.],
[1.]]), shape is torch.Size([2, 1])
Tensor No.3: tensor([[1., 1.],
[1., 1.]]), shape is torch.Size([2, 2])
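torch.split also accepts a plain int, in which case every piece has that size and only the last one may be smaller; with a list, the sizes must sum to the size of the split dimension. A shape-only sketch:
# Sketch: split with an int gives equal-size pieces (last may be smaller)
t = tt.ones((2, 5))
print([p.shape[1] for p in tt.split(t, 2, dim=1)])        # [2, 2, 1]
# With a list, the sizes must sum to the dim size (here 5), or a RuntimeError is raised
print([p.shape[1] for p in tt.split(t, [3, 2], dim=1)])   # [3, 2]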
# example 5
# torch.index_select
t = tt.randint(0, 9, size=(3, 3))
idx = tt.tensor([0, 2], dtype=tt.long)
t_select = tt.index_select(t, dim=0, index=idx)
print("t:\n{}\nt_select:\n{}".format(t, t_select))
t:
tensor([[4, 5, 0],
[5, 7, 1],
[2, 5, 8]])
t_select:
tensor([[4, 5, 0],
[2, 5, 8]])
# example 6
# torch.masked_select
t = tt.randint(0, 9, size=(3, 3))
mask = t.le(5)
t_select = tt.masked_select(t, mask)
print("t:\n{}\nmask:\n{}\nt_select:\n{}".format(t,mask,t_select))
t:
tensor([[0, 2, 3],
[1, 8, 4],
[0, 3, 6]])
mask:
tensor([[ True, True, True],
[ True, False, True],
[ True, True, False]])
t_select:
tensor([0, 2, 3, 1, 4, 0, 3])
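Note that torch.masked_select always returns a flat 1-D tensor, and boolean indexing t[mask] gives the same result. A quick check with a deterministic tensor:
# Sketch: masked_select always returns a flat 1-D tensor; t[mask] is equivalent
t = tt.arange(9).reshape(3, 3)
mask = t.le(5)
print(tt.equal(tt.masked_select(t, mask), t[mask]))   # True
print(tt.masked_select(t, mask).shape)                # torch.Size([6])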
# example 7 torch.reshape
t = tt.randperm(8)
t_reshape = tt.reshape(t, (-1, 2, 2))
print("t:\n{}\nt_reshape:\n{}".format(t, t_reshape))
t:
tensor([2, 0, 1, 6, 3, 4, 7, 5])
t_reshape:
tensor([[[2, 0],
[1, 6]],
[[3, 4],
[7, 5]]])
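Because the input here is contiguous, torch.reshape returns a view that shares storage with the original tensor, so an in-place write to t shows up in t_reshape as well. A short sketch:
# Sketch: for a contiguous tensor, reshape returns a view sharing the same storage
t = tt.randperm(8)
t_reshape = tt.reshape(t, (-1, 2, 2))
t[0] = 100
print(t_reshape[0, 0, 0])        # tensor(100) - the view sees the in-place change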
# example 8 torch.transpose
t = tt.rand((2, 3, 4))
t_transpose = tt.transpose(t, dim0=1, dim1=2)
print("t:\n{}\nt_transpose shape:\n{}\n".format(t, t_transpose.shape))
t:
tensor([[[0.5239, 0.7981, 0.7718, 0.0112],
[0.8100, 0.6397, 0.9743, 0.8300],
[0.0444, 0.0246, 0.2588, 0.9391]],
[[0.4167, 0.7140, 0.2676, 0.9906],
[0.2885, 0.8750, 0.5059, 0.2366],
[0.7570, 0.2346, 0.6471, 0.3556]]])
t_transpose shape:
torch.Size([2, 4, 3])
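torch.transpose swaps exactly two dimensions and returns a view without copying data, which usually leaves the result non-contiguous; permute reorders all dimensions at once. A shape-only sketch:
# Sketch: transpose returns a non-contiguous view; permute reorders all dims at once
t = tt.rand((2, 3, 4))
t_tr = tt.transpose(t, dim0=1, dim1=2)
print(t_tr.is_contiguous())          # False
print(t.permute(2, 0, 1).shape)      # torch.Size([4, 2, 3])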
# example 9 torch.squeeze
t = tt.rand((1, 2, 3, 1))
t_sq = tt.squeeze(t)
t_0 = tt.squeeze(t, dim=0)
t_1 = tt.squeeze(t, dim=1)
print("t:\n{}\nt_sq shape:\n{}\nt_0 shape:\n{}\nt_1 shape:\n{}\n"
      .format(t, t_sq.shape, t_0.shape, t_1.shape))
t:
tensor([[[[0.4452],
[0.0193],
[0.2616]],
[[0.7713],
[0.3785],
[0.9980]]]])
t_sq shape:
torch.Size([2, 3])
t_0 shape:
torch.Size([2, 3, 1])
t_1 shape:
torch.Size([1, 2, 3, 1])
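The t_1 result is unchanged because squeeze with an explicit dim only removes that dim when its size is 1; unsqueeze is the inverse operation, inserting a size-1 dim. A shape-only sketch:
# Sketch: squeeze only drops size-1 dims; unsqueeze inserts one
t = tt.rand((1, 2, 3, 1))
print(tt.squeeze(t, dim=3).shape)                 # torch.Size([1, 2, 3])    - size-1 dim removed
print(tt.squeeze(t, dim=1).shape)                 # torch.Size([1, 2, 3, 1]) - size 2, left unchanged
print(tt.unsqueeze(tt.squeeze(t), dim=0).shape)   # torch.Size([1, 2, 3])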
# example 10 torch.add
t_0 = tt.randn((3, 3))
t_1 = tt.ones_like(t_0)
t_add = tt.add(t_0, t_1, alpha=10)   # computes t_0 + 10 * t_1 (the old positional value argument is deprecated)
print("t_0:\n{}\nt_1:\n{}\nt_add_10:\n{}"
      .format(t_0, t_1, t_add))
t_0:
tensor([[ 0.2424, 0.8616, 0.0727],
[ 1.3484, -0.8737, -0.2693],
[-0.5124, -0.2997, 0.6655]])
t_1:
tensor([[1., 1., 1.],
[1., 1., 1.],
[1., 1., 1.]])
t_add_10:
tensor([[10.2424, 10.8616, 10.0727],
[11.3484, 9.1263, 9.7307],
[ 9.4876, 9.7003, 10.6655]])
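torch.add with alpha is simply the fused form of input + alpha * other; a quick sketch confirming the two expressions agree (values are random, so only the equality check is printed):
# Sketch: add(input, other, alpha=k) equals input + k * other
t_0 = tt.randn((3, 3))
t_1 = tt.ones_like(t_0)
print(tt.allclose(tt.add(t_0, t_1, alpha=10), t_0 + 10 * t_1))   # True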