What Variable does in torch

Wrapping an output Tensor in Variable turns it into a plain input variable: its grad_fn is stripped, so gradients no longer flow back through it. detach() achieves the same thing (and is the preferred API, since Variable has been deprecated since PyTorch 0.4). If neither is used, the two losses below share one computation graph, so the first backward() must be called with retain_graph=True; otherwise the second backward() raises an error.
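As a minimal sketch of this graph-cutting effect on a toy tensor (the names x and y here are illustrative, not from the original post):

import torch
from torch.autograd import Variable

x = torch.ones(1, requires_grad=True)
y = x * 2
print(y.grad_fn)            # <MulBackward0 ...>: y is linked back to x
print(y.detach().grad_fn)   # None: the link into the graph is cut
print(Variable(y).grad_fn)  # also None, matching the output shown below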

import torch
import torch.nn as nn
from torch.autograd import Variable

class g(nn.Module):
    def __init__(self):
        super(g, self).__init__()
        # 1x1 conv mapping 2 channels to 1, no bias
        self.k = nn.Conv2d(in_channels=2, out_channels=1, kernel_size=1, padding=0, bias=False)

    def forward(self, z):
        return self.k(z)


c = 2
h = 5
w = 5
z1 = torch.arange(0, c * h * w).float().view(1, c, h, w)
z2 = torch.arange(0, c * h * w).float().view(1, c, h, w)+5
k1 = g()
k2 = g()
r1 = k1(z1)
r2 = k2(z2)
print(r1, Variable(r1))  # r1 carries a grad_fn; Variable(r1) does not

# Any of these variants stops gradients from reaching the wrapped branch:
# loss1 = nn.MSELoss()(r1, Variable(r2))
# loss2 = nn.MSELoss()(Variable(r1), r2)
# loss1 = nn.MSELoss()(r1, r2.detach())
# loss2 = nn.MSELoss()(r1.detach(), r2)

# Without detach()/Variable the two losses share one graph, so the first
# backward() must keep it alive, otherwise the second one raises an error.
loss1 = nn.MSELoss()(r1, r2)
loss2 = nn.MSELoss()(r1, r2)
loss1.backward(retain_graph=True)
loss2.backward()
# print("z1.grad", z1.grad)  # None here: z1 was created with requires_grad=False

Output:

tensor([[[[ -8.4214,  -8.9186,  -9.4158,  -9.9130, -10.4102],
          [-10.9074, -11.4046, -11.9018, -12.3990, -12.8961],
          [-13.3933, -13.8905, -14.3877, -14.8849, -15.3821],
          [-15.8793, -16.3765, -16.8737, -17.3709, -17.8681],
          [-18.3653, -18.8625, -19.3596, -19.8568, -20.3540]]]],
       grad_fn=<MkldnnConvolutionBackward>) tensor([[[[ -8.4214,  -8.9186,  -9.4158,  -9.9130, -10.4102],
          [-10.9074, -11.4046, -11.9018, -12.3990, -12.8961],
          [-13.3933, -13.8905, -14.3877, -14.8849, -15.3821],
          [-15.8793, -16.3765, -16.8737, -17.3709, -17.8681],
          [-18.3653, -18.8625, -19.3596, -19.8568, -20.3540]]]])
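To check the gradient flow directly, here is a small follow-up sketch (not from the original post; it reuses k1, k2, z1, z2 from above) showing that detaching one branch leaves that branch's weights without a gradient:

k1.k.weight.grad = None  # clear any gradients accumulated above
k2.k.weight.grad = None
r1 = k1(z1)              # recompute so the graph is fresh
r2 = k2(z2)
loss = nn.MSELoss()(r1, r2.detach())
loss.backward()
print(k1.k.weight.grad)  # a tensor: the gradient flowed through k1
print(k2.k.weight.grad)  # None: detach() cut the path into k2's branch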
