-
import torch

# Sample a 4x3 tensor of uniform [0, 1) values and display it.
shape = (4, 3)
x = torch.rand(*shape)
print(x)
'''
output:
tensor([[0.5573, 0.2395, 0.5920],
[0.6214, 0.2509, 0.7425],
[0.3225, 0.0648, 0.3799],
[0.0929, 0.9899, 0.1163]])
'''
-
import torch

# All-zeros tensor; dtype=torch.long makes the entries 64-bit integers.
x = torch.zeros(4, 3, dtype=torch.long)
print(x)
'''
output:
tensor([[0, 0, 0],
[0, 0, 0],
[0, 0, 0],
[0, 0, 0]])
'''
-
import torch

# Start from a random 4x3 tensor and show its original values.
x = torch.rand(4, 3)
print(x)

# zeros_like allocates a NEW zero tensor shaped like x; x is untouched by it.
y = torch.zeros_like(x)
# zero_ is the in-place variant: it overwrites x with zeros and returns x itself.
z = torch.zero_(x)

print(x)  # now all zeros — because of zero_, not zeros_like
print(y)
print(z)
'''
output:
tensor([[0.0644, 0.5627, 0.0045],
[0.2518, 0.6186, 0.8489],
[0.1588, 0.7015, 0.5523],
[0.5772, 0.1593, 0.7644]])
tensor([[0., 0., 0.],
[0., 0., 0.],
[0., 0., 0.],
[0., 0., 0.]])
tensor([[0., 0., 0.],
[0., 0., 0.],
[0., 0., 0.],
[0., 0., 0.]])
tensor([[0., 0., 0.],
[0., 0., 0.],
[0., 0., 0.],
[0., 0., 0.]])
可以发现 torch.zero_() 是原地(in-place)操作:它把原张量 x 直接改成全零并返回 x 本身;而 torch.zeros_like() 并不会修改 x,它只是返回一个与 x 形状相同的新全零张量。上面 x 变成全零,是 zero_ 造成的,不是 zeros_like。
'''
-
import torch

# Build a tensor directly from Python data; the float promotes both entries.
values = [5.5, 3]
x = torch.tensor(values)
print(x)
'''
output:
tensor([5.5000, 3.0000])
'''
-
# new_ones builds an all-ones tensor reusing y's properties, except the
# dtype which is overridden to 64-bit float here.
# NOTE(review): relies on a tensor `y` defined by an earlier snippet.
y = y.new_ones(4, 3, dtype=torch.double)
print(y)
# .shape and .size() report the same information.
print(y.shape)
print(y.size())
print(y.dtype)
'''
output:
tensor([[1., 1., 1.],
[1., 1., 1.],
[1., 1., 1.],
[1., 1., 1.]], dtype=torch.float64)
torch.Size([4, 3])
torch.Size([4, 3])
torch.float64
'''
# randn_like keeps y's shape but draws N(0, 1) samples, with the dtype
# overridden back to 32-bit float.
# NOTE(review): relies on a tensor `y` defined by an earlier snippet.
y_ = torch.randn_like(y, dtype=torch.float)
print(y_)
print(y_.shape)
print(y_.size())
print(y_.dtype)
'''
tensor([[-1.0661, -0.8657, 2.1197],
[ 0.1403, 0.4686, 1.3409],
[ 0.6883, -0.6068, 0.9095],
[-0.1570, 1.3021, 1.0214]])
torch.Size([4, 3])
torch.Size([4, 3])
torch.float32
'''
-
# A handful of direct constructors; each line rebinds x, so only the
# last value (the identity matrix) survives.
x = torch.tensor([1, 2])   # from explicit data
x = torch.ones(1, 2)       # all ones
x = torch.zeros(1, 2)      # all zeros
x = torch.eye(2)           # 2x2 identity
'''
tensor([[1., 0.],
[0., 1.]])
'''
x = torch.arange(start=1, end=10, step=2)      # 1, 3, 5, 7, 9
x = torch.linspace(start=1, end=10, steps=3)   # 3 evenly spaced points in [1, 10]
x = torch.rand(5)                              # uniform [0, 1)
x = torch.randn(5)                             # standard normal
x = torch.zeros(2, 3)
x.normal_(mean=0, std=1)                       # in-place refill with N(0, 1) samples
'''
tensor([[ 1.0211, -1.2035, 1.5915],
[ 0.1101, -0.8491, 1.5457]])
'''
# Element-wise sampling: entry i ~ N(mean[i], std[i]); the means run 1..10
# while the standard deviations shrink from 1.0 down to 0.1.
x = torch.normal(torch.arange(1., 11.), torch.arange(1., 0, -0.1))
'''
tensor([0.7151, 2.9019, 2.9995, 3.5276, 4.5882, 5.6927, 7.7414, 8.2401, 8.8037,
9.8141])
返回一个张量,包含从给定参数means,std的离散正态分布中抽取随机数。 均值means是一个张量,包含每个输出元素相关的正态分布的均值。
'''
x = torch.randperm(5)  # random permutation of the integers 0..4
- 张量操作
-
import torch

x, y = torch.rand(4, 3), torch.rand(4, 3)
print(x)
print(y)
print("---------------")

# Three equivalent additions: the operator, the functional form, and the
# in-place method (trailing underscore mutates y).
print(x + y)
print(torch.add(x, y))
y.add_(x)  # y now holds (original y) + x
print(y)
'''
tensor([[0.4765, 0.9658, 0.9384],
[0.9804, 0.6404, 0.8394],
[0.4807, 0.8789, 0.4184],
[0.5555, 0.2453, 0.7822]])
tensor([[0.2453, 0.2788, 0.8920],
[0.4451, 0.0579, 0.0933],
[0.4913, 0.6723, 0.8088],
[0.2263, 0.5807, 0.6181]])
---------------
tensor([[0.7218, 1.2446, 1.8304],
[1.4255, 0.6983, 0.9327],
[0.9720, 1.5512, 1.2272],
[0.7819, 0.8261, 1.4003]])
tensor([[0.7218, 1.2446, 1.8304],
[1.4255, 0.6983, 0.9327],
[0.9720, 1.5512, 1.2272],
[0.7819, 0.8261, 1.4003]])
tensor([[0.7218, 1.2446, 1.8304],
[1.4255, 0.6983, 0.9327],
[0.9720, 1.5512, 1.2272],
[0.7819, 0.8261, 1.4003]])
'''
-
import torch

x = torch.rand(4, 3)
print(x)
# Basic indexing: every row, column 1 — yields a 1-D tensor of length 4.
print(x[:, 1])
'''
tensor([[0.6915, 0.0722, 0.7704],
[0.1637, 0.5775, 0.7028],
[0.5363, 0.6569, 0.9537],
[0.0964, 0.0600, 0.0600]])
tensor([0.0722, 0.5775, 0.6569, 0.0600])
'''
-
# Slicing returns a view: y aliases row 0 of x, so the in-place += below
# is visible through x as well.
# NOTE(review): relies on a tensor `x` defined by an earlier snippet.
y = x[0, :]
y += 1
print(y)
print(x[0, :])
'''
tensor([1.6915, 1.0722, 1.7704])
tensor([1.6915, 1.0722, 1.7704])
可以看到,原来的x张量也被修改了
'''
import copy

# deepcopy materialises an independent tensor, so the += no longer touches x.
# NOTE(review): relies on a tensor `x` defined by an earlier snippet.
y = copy.deepcopy(x[0, :])
y += 1
print(y)
print(x[0, :])
'''
tensor([1.6915, 1.0722, 1.7704])
tensor([0.6915, 0.0722, 0.7704])
使用深拷贝方法时,张量x的值就不会被修改
'''
-
x = torch.randn(4, 4)
# view reinterprets the same storage under a new shape; -1 asks PyTorch
# to infer that dimension from the element count.
y = x.view(16)
z = x.view(-1, 8)
print(x.shape, y.shape, z.shape)
'''
torch.Size([4, 4]) torch.Size([16]) torch.Size([2, 8])
'''
-
# In-place bump on x; since y is a view over x's storage, y changes too.
# NOTE(review): relies on tensors `x` and `y` defined by an earlier snippet.
x += 1
print(x)
print(y)
'''
tensor([[-0.6099, 0.3227, 0.0073, 1.4421],
[ 2.7223, 0.0773, 1.2584, 2.1311],
[ 1.7686, 0.6116, 0.0822, 1.0682],
[ 0.8888, 0.5013, 3.0155, 2.7268]])
tensor([-0.6099, 0.3227, 0.0073, 1.4421, 2.7223, 0.0773, 1.2584, 2.1311,
1.7686, 0.6116, 0.0822, 1.0682, 0.8888, 0.5013, 3.0155, 2.7268])
以上结果说明,x 和 y 共享同一块底层存储(但并不是同一个 Python 对象),torch.view() 只是改变了对这块数据的观察角度,所以对 x 的原地修改也会体现在 y 上
'''
-
x = torch.randn(4, 4)
y = x.view(16)
z = x.view(-1, 8)
print(x.shape, y.shape, z.shape)
# id() compares Python object identity: these are three DISTINCT tensor
# objects, even though they share one underlying storage.
print(id(x))
print(id(y))
print(id(z))
'''
torch.Size([4, 4]) torch.Size([16]) torch.Size([2, 8])
(三个 id 的具体数值每次运行都不同,但彼此互不相等)
可以看到它们的 id 各不相同:x、y、z 是三个不同的 Python 对象,view() 会返回新的张量对象;
它们只是共享同一块底层数据,因此修改其中一个的内容会同时影响其他几个
'''
-
x = torch.randn(1)
# .item() unwraps a one-element tensor into a plain Python number.
print(type(x))
print(type(x.item()))
print(x.item())
'''
<class 'torch.Tensor'>
<class 'float'>
-2.635510206222534
'''
-
# Broadcasting demo: a (1, 2) row plus a (3, 1) column both expand to (3, 2).
x = torch.arange(1, 3).view(1, 2)
print(x)
y = torch.arange(1, 4).view(3, 1)
print(y)
print(x + y)
'''
tensor([[1, 2]])
tensor([[1],
[2],
[3]])
tensor([[2, 3],
[3, 4],
[4, 5]])
x和y分别是一行两列和3行一列的矩阵,如果要运算的话需要将维度保持一致,将x复制两行变成三行两列,将y复制一列变成三行两列,这样维度一致之后,就可以运算了。
'''