范数 (Norms)
有几种常见的范数（如 L1、L2 范数），参考：https://blog.csdn.net/left_la/article/details/9159949
import torch

# A flat vector of eight 1.0 entries (float32 — note the trailing dots
# in the printed output below).
a = torch.ones(8)
# Reinterpret the same 8 elements as a 2x4 matrix and a 2x2x2 cube.
b = a.reshape(2, 4)
c = a.reshape(2, 2, 2)
print(b)
# tensor([[1., 1., 1., 1.],
#         [1., 1., 1., 1.]])
print(c)
# tensor([[[1., 1.],
#          [1., 1.]],
#         [[1., 1.],
#          [1., 1.]]])
# The norm is taken over ALL elements regardless of shape, so all three
# tensors agree: L1 norm = sum of |x| = 8, L2 norm = sqrt(8) ~ 2.8284.
print(a.norm(1), b.norm(1), c.norm(1))
# tensor(8.) tensor(8.) tensor(8.)
print(a.norm(2), b.norm(2), c.norm(2))
# tensor(2.8284) tensor(2.8284) tensor(2.8284)
# With dim= the reduction runs along that axis only:
# dim=0 sums each column (2 rows -> 2.), dim=1 sums each row (4 cols -> 4.).
print(b.norm(p=1, dim=0))
# tensor([2., 2., 2., 2.])
print(b.norm(p=1, dim=1))
# tensor([4., 4.])
统计属性 (Statistical reductions: min / max / mean / prod / sum / argmax / argmin)
import torch

# Reduction statistics on a random 5x4 matrix (values change every run;
# the sample outputs in the comments are illustrative only).
a = torch.rand(5, 4)
print(a)
# e.g. tensor([[0.1559, 0.9757, 0.4115, 0.8225],
#              ...                              ]) -- 5 rows in total
# prod multiplies every element together; sum adds them all up.
print(a.min(), a.max(), a.mean(), a.prod(), a.sum())
# e.g. tensor(0.1559) tensor(0.9757) tensor(0.4432) tensor(0.0003) tensor(3.5458)
# argmax/argmin return indices into the FLATTENED tensor by default.
print(a.argmax(), a.argmin())
# e.g. tensor(2) tensor(6)
b = torch.rand(5, 4)
print(b)
# e.g. tensor([[0.3490, 0.4922, 0.7263, 0.0940],
#              [0.2582, 0.1204, 0.5092, 0.8789],
#              [0.2714, 0.6527, 0.9785, 0.5949],
#              [0.0825, 0.7796, 0.7323, 0.9179],
#              [0.0172, 0.8452, 0.3348, 0.4895]])
# With dim=1 the index of the maximum is computed per row.
print(b.argmax(dim=1))
# e.g. tensor([2, 3, 2, 3, 1])
前几名、第几名 (top-k largest / k-th smallest): topk 与 kthvalue
import torch

# topk vs kthvalue on a random vector (sample outputs vary per run).
a = torch.rand(10)
print(a)
# e.g. tensor([0.6685, 0.1509, 0.9114, 0.8626, 0.2820,
#              0.3218, 0.9471, 0.3009, 0.6533, 0.4107])
# topk(k): the k LARGEST values, in descending order, plus their indices.
print(a.topk(2))
# e.g. torch.return_types.topk(
#          values=tensor([0.9471, 0.9114]),
#          indices=tensor([6, 2]))
# kthvalue(k): the k-th SMALLEST value (here the 2nd smallest) and its index.
print(a.kthvalue(2))
# e.g. torch.return_types.kthvalue(
#          values=tensor(0.2820),
#          indices=tensor(4))
比较 (Element-wise comparisons)
import torch

# Element-wise comparisons produce boolean tensors of the same shape.
# Values are random each run; the commented outputs are samples.
a = torch.randint(0, 10, (3, 3))
print(a)
# e.g. tensor([[5, 0, 7],
#              [2, 1, 3],
#              [8, 1, 3]])
# Comparing against a scalar broadcasts it over every element.
print(a > 5)
# e.g. tensor([[False, False,  True],
#              [False, False, False],
#              [ True, False, False]])
b = torch.randint(0, 10, (3, 3))
print(b)
# e.g. tensor([[6, 3, 6],
#              [5, 1, 9],
#              [6, 8, 2]])
# torch.eq compares the two tensors position by position.
print(torch.eq(a, b))
# e.g. tensor([[False, False, False],
#              [False,  True, False],
#              [False, False, False]])