pytorch2——张量操作

数据操作

张量拼接

拼接:cat

dim = num,在第 num 个维度上进行拼接(cat 是拼接而非相加)

import torch

# torch.cat joins tensors along an EXISTING dimension;
# all other dimensions of the inputs must match.
t = torch.ones(2, 3)
print("before cat dim = 0,\tshape of t:", t.shape)
t = torch.cat([t, t], dim=0)  # (2, 3) + (2, 3) -> (4, 3)
print("after cat dim = 0,\tshape of t:", t.shape)
print("t:", t)

t = torch.ones(2, 3)
print("before cat dim = 1,\tshape of t:", t.shape)
t = torch.cat([t, t], dim=1)  # (2, 3) + (2, 3) -> (2, 6)
# BUG FIX: this label previously said "dim = 0" even though dim=1 was used.
print("after cat dim = 1,\tshape of t:", t.shape)
print("t:", t)

堆积:stack

堆积会在原来维度的基础上扩充维度

import torch

# torch.stack joins tensors along a NEW dimension, so the rank grows by one.
t = torch.ones((2, 3))
print("before stack dim = 0\tshape of t:", t.shape)
t = torch.stack([t, t, t], dim=0)  # 3 x (2, 3) -> (3, 2, 3)
print("after stack dim = 0\tshape of t:", t.shape)
print("t:", t)

t = torch.ones((2, 3))
# BUG FIX: the labels below previously said "dim = 0" for the
# dim=1 and dim=2 examples.
print("before stack dim = 1\tshape of t:", t.shape)
t = torch.stack([t, t, t], dim=1)  # 3 x (2, 3) -> (2, 3, 3)
print("after stack dim = 1\tshape of t:", t.shape)
print("t:", t)

t = torch.ones((2, 3))
print("before stack dim = 2\tshape of t:", t.shape)
t = torch.stack([t, t, t], dim=2)  # 3 x (2, 3) -> (2, 3, 3)
print("after stack dim = 2\tshape of t:", t.shape)
print("t:", t)

切分

平均切分:chunk

import torch

# chunk: cut a tensor into a given number of pieces along one dimension.
# 7 columns split into 3 chunks -> sizes 3, 3, 1 (the last chunk takes the remainder).
t = torch.ones(2, 7)
print("before chunk dim=1,chunks=3\tshape of t:", t.shape)
list_sub_t = t.chunk(chunks=3, dim=1)
for i, sub_t in enumerate(list_sub_t):
    print(f"after chunk dim=1,chunks=3\tshape of sub_t[{i}]:", sub_t.shape)

自定义切分:split

import torch

# split: like chunk, but the caller chooses each section's size explicitly.
t = torch.ones(2, 5)
print("before split dim=1\tshape of t:", t.shape)
section_sizes = [2, 1, 1, 1]  # must sum to t.size(1)
list_sub_t = t.split(section_sizes, dim=1)
for i, sub_t in enumerate(list_sub_t):
    print(f"after split dim=1\tshape of sub_t[{i}]:", sub_t.shape)

索引

import torch

# index_select: pick whole rows (dim=0) of t by an integer index tensor.
t = torch.arange(1, 10).reshape(3, 3)
print("t:", t)
idx = torch.tensor([0, 2], dtype=torch.long)
print("idx", idx)
selected_t = t.index_select(0, idx)
print("selected t:", selected_t)
# Fancy indexing does the same job and is the more common spelling.
print(t[[0, 2], :])
print(t[0, 0])

# masked_select: keep the elements where a boolean mask is True (result is 1-D).
t = torch.arange(1, 10).reshape(3, 3)
print("t:", t)
# comparison helpers: lt(<)  le(<=)  gt(>)  ge(>=)
mask = torch.le(t, 5)
print("mask:", mask)
masked_t = t.masked_select(mask)
print("masked select t:", masked_t)

维度变化

维度重塑:reshape

import torch

# reshape: if the source tensor is contiguous in memory, the result
# shares the same underlying storage instead of copying.
t = torch.randperm(8)
print("before reshape, t:", t)
re_t = t.reshape(4, 2)
print("after reshape, shape of re_t:", re_t.shape)
re_t = t.reshape(-1, 2)  # -1 lets torch infer that dimension (here: 4)
print("after reshape, shape of re_t:", re_t.shape)
print(re_t is t)  # distinct Python objects even though storage is shared
re_t[1, 1] = 1000  # write through the view: t changes as well
print(t)

维度交换:transpose

import torch

# transpose swaps two dimensions; here dims 1 and 2 of a (2, 3, 4) tensor.
t = torch.rand(2, 3, 4)
print(t)
t_t = t.transpose(1, 2)
print("after transpose, shape of t", t_t.shape)

转置:t()(仅适用于二维矩阵)

# t(): matrix transpose, defined for 2-D tensors only.
# NOTE: relies on t_t from the transpose snippet above.
t_0 = t_t[0]  # first (4, 3) slice of t_t
print("shape of t[0]:", t_0.shape)
t_0_t = t_0.t()
print("shape of t[0].t", t_0_t.shape)

张量压缩:squeeze

# squeeze: drop dimensions whose size is 1
import torch

t = torch.rand(1, 2, 3, 1)
print("before squeeze,shape of t:", t.shape)
# with no dim argument, every size-1 dimension is removed
print("after squeeze,shape of t:", t.squeeze().shape)
# with dim=0, only that dimension is considered (removed here since its size is 1)
print("after squeeze dim=0,shape of t:", t.squeeze(dim=0).shape)

张量扩充:unsqueeze

import torch

# unsqueeze: insert a new size-1 dimension at the given position.
t = torch.tensor([1, 2, 3, 4]).reshape(2, 2)
t = t.unsqueeze(1)  # (2, 2) -> (2, 1, 2)
print(t.shape)

数学运算 (略)

评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值