pytorch: tensor learning notes

Reference: NetEase Cloud Classroom, PyTorch course

# type check
import torch

a = torch.randn(2, 3)
a.type()                              # the default tensor type is torch.FloatTensor, out: 'torch.FloatTensor'
type(a)                               # out: torch.Tensor
isinstance(a, torch.FloatTensor)      # is it an instance of torch.FloatTensor? True

import torch

b = torch.randn(2, 3)
isinstance(b, torch.cuda.FloatTensor)   # is it a CUDA tensor? False

b = b.cuda()
isinstance(b, torch.cuda.FloatTensor)   # is it a CUDA tensor? True
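
The same move can also be written device-agnostically with .to(); a minimal sketch (not from the course, the variable names are my own):

import torch

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
x = torch.randn(2, 3)
x = x.to(device)        # moves to the GPU only if one is available
print(x.device)         # e.g. cuda:0, or cpu
print(x.dtype)          # torch.float32 either way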

# Dimension 0 / rank 0: scalar (0-dim tensor)
torch.tensor(1.)     # out: tensor(1.)
torch.tensor(1.3)    # out: tensor(1.3000)
# 0-dim tensors are typically used to hold a loss value


a = torch.tensor(2.2)
a.shape        # shape is an attribute, out: torch.Size([])
len(a.shape)   # 0-dim tensor, out: 0
a.size()       # size() is a method, out: torch.Size([])
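
For instance, a loss computed over a batch comes back as a 0-dim tensor; a minimal sketch with made-up data:

import torch
import torch.nn.functional as F

pred = torch.randn(4, 10)          # fake predictions for a batch of 4
target = torch.randn(4, 10)        # fake targets
loss = F.mse_loss(pred, target)    # mean squared error, reduced to a scalar
print(loss.shape)                  # torch.Size([]) -> 0-dim
print(loss.item())                 # plain Python float, convenient for logging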

# Dim 1 / rank 1: used for Bias and Linear input
import numpy as np

torch.tensor([1.1])        # torch.tensor() takes the data itself, out: tensor([1.1000])
torch.tensor([1.1, 2.2])   # out: tensor([1.1000, 2.2000])
torch.FloatTensor(1)       # torch.FloatTensor() takes the shape: 1-dim tensor of length 1, uninitialized, e.g. tensor([0.])
torch.FloatTensor(2)       # 1-dim tensor of length 2, uninitialized, e.g. tensor([-1.8891e+26,  6.0240e-01])
data = np.ones(2)          # out: array([1., 1.])
data
torch.from_numpy(data)     # convert a numpy array to a tensor: tensor([1., 1.], dtype=torch.float64)
a = torch.ones(2)
a.shape                    # out: torch.Size([2])
print(a)                   # out: tensor([1., 1.])
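
One place a 1-dim tensor appears in practice is the bias of a linear layer; a minimal sketch (the layer sizes are arbitrary):

import torch
import torch.nn as nn

layer = nn.Linear(784, 10)     # in_features=784, out_features=10
print(layer.bias.shape)        # torch.Size([10]) -> 1-dim bias vector
print(layer.weight.shape)      # torch.Size([10, 784])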

# Dim 2: a batch of Linear inputs
a = torch.randn(2, 3)
a
'''out: tensor([[ 0.6240,  2.0328,  0.9443],
        [ 2.6551, -0.4404,  0.9255]])'''
a.shape      # out: torch.Size([2, 3])

a.size(0)    # out: 2, size of dim 0

a.size(1)    # out: 3, size of dim 1

a.shape[1]   # out: 3, size of dim 1
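
A 2-dim tensor of shape [batch, features] is exactly what a linear layer consumes; a minimal sketch (sizes are arbitrary):

import torch
import torch.nn as nn

x = torch.randn(4, 784)    # a batch of 4 flattened 28x28 images
layer = nn.Linear(784, 10)
out = layer(x)
print(out.shape)           # torch.Size([4, 10]) -> one row of outputs per sample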


# Dim 3: RNN input batch
a = torch.rand(1, 2, 3)
a
# out: tensor([[[0.2152, 0.5335, 0.2066],
#        [0.7523, 0.3732, 0.8527]]])

a.shape          # out: torch.Size([1, 2, 3])

a[0]             # the first element along dim 0, a 2x3 tensor
'''out: tensor([[0.9203, 0.5069, 0.9590],
        [0.7210, 0.4653, 0.6650]])'''
list(a.shape)    # convert the shape to a list: [1, 2, 3]
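
With the default batch_first=False, nn.RNN expects a 3-dim input of shape [seq_len, batch, input_size]; a minimal sketch (sizes are arbitrary):

import torch
import torch.nn as nn

rnn = nn.RNN(input_size=100, hidden_size=20)
x = torch.randn(10, 3, 100)    # seq_len=10, batch=3, input_size=100
out, h = rnn(x)
print(out.shape)               # torch.Size([10, 3, 20])
print(h.shape)                 # torch.Size([1, 3, 20]), one layer of hidden state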

# Dim 4: CNN input [b, c, h, w] (batch, channels, height, width)

a = torch.rand(2, 3, 28, 28)
a

'''out: tensor([[[[0.6759, 0.9713, 0.0646,  ..., 0.5549, 0.0636, 0.8658],
          ...,
          [0.3111, 0.9323, 0.4097,  ..., 0.3758, 0.1607, 0.4282]]]])
(output truncated: 2 samples x 3 channels x 28 x 28 random values)'''

a.shape #out: torch.Size([2, 3, 28, 28])
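
Feeding such a 4-dim batch through a convolution layer; a minimal sketch (the layer hyperparameters are arbitrary):

import torch
import torch.nn as nn

x = torch.rand(2, 3, 28, 28)    # [batch, channels, height, width]
conv = nn.Conv2d(in_channels=3, out_channels=16, kernel_size=3, padding=1)
out = conv(x)
print(out.shape)                # torch.Size([2, 16, 28, 28]), padding=1 keeps 28x28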
 


