# Data manipulation
# BUG FIX: the original line 1 was the bare text `数据操作`, which Python parses
# as an undefined identifier and raises NameError before anything runs.
import torch

# 1. 1-D tensor with the integers in [0, 12)
x = torch.arange(12)
print(x)
# 2. Shape and total element count
print(x.shape)   # torch.Size([12])
print(x.numel()) # 12
# 3. Reshape the 12 elements into 3 rows x 4 columns
x = x.reshape(3, 4)
print(x)  # 3x4
# 4. Tensors filled with all zeros / all ones
y = torch.zeros(2, 3, 4)  # shape 2x3x4
print(y)
z = torch.ones(2, 3, 4)
print(z)
# 5. Build a tensor directly from a nested Python list
x1 = torch.tensor([[[2,1,4,3],[1,2,3,4],[4,3,2,1]]])
print(x1)
print(x1.shape)  # torch.Size([1, 3, 4])
# 6. Joining tensors along a chosen axis
x = torch.arange(12, dtype=torch.float32).reshape(3, 4)
y = torch.tensor([[2.0, 1, 4, 3], [1, 2, 3, 4], [4, 3, 2, 1]])  # 3x4
# dim=0 stacks vertically (6x4); dim=1 stacks horizontally (3x8)
z1, z2 = (torch.cat((x, y), dim=d) for d in (0, 1))
print("z1 = ", z1)
print("z2 = ", z2)
# 7. Element-wise equality gives a boolean tensor of the same shape
print(x)
print(y)
print(x == y)
# 8. Grand total over every element
total = x.sum()
print(total)
# 9. What if the shapes differ? Broadcasting.
a = torch.arange(3).reshape(3, 1)   # column vector, 3x1
print(a)
b = torch.arange(2).reshape(1, 2)   # row vector, 1x2
print(b)
# Both operands are virtually expanded to 3x2 before the add.
print(a + b)
# 10. Indexing and slicing
x = torch.arange(12, dtype=torch.float32).reshape(3, 4)
print(x)
print(x[-1])   # last row
print(x[1:3])  # rows with index 1 and 2
x[1, 2] = 9    # write a single element (row 1, column 2)
x[:2] = 3.14   # fill rows 0 and 1 across every column
print(x)
#11. Memory identity: which operations allocate a new tensor vs. reuse storage
x = torch.arange(12,dtype=torch.float32).reshape(3,4)
y = torch.tensor([[2.0,1,4,3],[1,2,3,4],[4,3,2,1]]) # 3x4
before = id(y)
y = y + x
res = id(y) == before
print(res) # False: `y + x` allocates a fresh tensor, then `y` is rebound to it
z = torch.zeros_like(y)
print(id(z))
z[:] = x + y
print(id(z)) # same id: slice assignment writes into z's existing storage
before = id(x)
x += y
print(id(x) == before) # True: += on a tensor updates it in place
#11. Converting between torch tensors and numpy arrays
# NOTE(review): on CPU, x.numpy() is documented to share storage with x,
# while torch.tensor(a) makes an independent copy — confirm if that matters here.
a = x.numpy()
b = torch.tensor(a)
print(type(a))
print(type(b))
#12. Converting a size-1 tensor to a plain Python scalar
a = torch.tensor([3.5])
print(a,a.item(),float(a),int(a)) # int() truncates 3.5 -> 3
# Data preprocessing
# BUG FIX: the original lines 71 and 73 were bare text (`数据预处理`, `线性代数`);
# Python treats them as undefined identifiers and raises NameError.
# Open question: missing numeric values were filled with the column mean —
# what should be done for string columns?
# Linear algebra
# 13. Matrix transpose
a = torch.arange(20).reshape(5, 4)
print(a)
print(a.T)
b = torch.tensor([[1, 2, 3], [2, 0, 4], [3, 4, 5]])
print(b)
print(b.T)
print(b == b.T)  # b is symmetric, so every entry compares equal
# 14. a.clone() allocates new memory and copies the data
a = torch.arange(20, dtype=torch.float32).reshape(5, 4)
b = a.clone()
print(a)
print(a + b)
print(a * b)  # element-wise (Hadamard) product, not matrix multiply
k = 2
print(a + k, a * k)  # a scalar broadcasts over every element
# 15. Summation
print(a)
print(a.sum(axis=0))       # collapse the row axis (length 5) -> 4 values
print(a.sum(axis=1))       # collapse the column axis (length 4) -> 5 values
print(a.sum(axis=[0, 1]))  # collapse both axes at once -> scalar
print(a.sum())
# 16. Mean: identical to sum divided by the element count
print(a.mean() == a.sum() / a.numel())
print(a.mean(axis=0))
print(a.sum(axis=0) / a.shape[0])
print(a.shape[0], a.shape[1], a.shape)
# 17. How to keep the number of axes when reducing?
sum_a = a.sum(axis=1, keepdims=True)  # shape (5, 1) rather than (5,)
print(a)
print(sum_a)
print(a / sum_a)  # broadcasting divides each row by its own row sum
# 18. Cumulative totals along a single axis
print(a)
print(a.cumsum(axis=0))
print(a)
print(a.cumsum(axis=1))
# 19. Dot product: the sum of x[i] * y[i]
x = torch.arange(4, dtype=torch.float32)
y = torch.ones(4)
print(x, y)
print(torch.dot(x, y))  # both operands must share the same dtype
# Equivalent formulation: multiply element-wise, then sum
print(torch.sum(x * y))
# 20. Matrix-vector product: torch.mv(); matrix-matrix product: torch.mm()
A = torch.arange(20).reshape(5, 4)
x = torch.arange(4)
print(A, x)
print(A.shape, x.shape)
print(torch.mv(A, x))
A = torch.arange(20, dtype=torch.float32).reshape(5, 4)
B = torch.ones(4, 5)
print(A)
print(B)
print(torch.mm(A, B))
# 21. Vector norms
# L2 norm: square root of the sum of squared entries
u = torch.tensor([3.0, -4])
print(torch.norm(u))       # 5.0
# L1 norm: sum of absolute values
print(torch.abs(u).sum())  # 7.0
# 22. Matrix norms
# Frobenius norm: sqrt of the sum of all squared entries
A = torch.ones(4, 9)
print(torch.norm(A))  # sqrt(36) = 6.0
x = torch.arange(12).reshape(3, 4)
print(x)
# Reducing over both axes equals the grand total; no element is counted twice
print(x.sum(axis=[0, 1]) == x.sum())