"""Minimal PyTorch autograd demo.

Part 1: gradient of z = sum(x + y) w.r.t. a 2x2 tensor x (all ones).
Part 2: derivative of z = (x + 2)**2 evaluated at x = 2 (which is 8).
"""
import torch
from torch.autograd import Variable  # kept for backward compat; no longer needed since PyTorch 0.4

# requires_grad=True tells autograd to track gradients for this tensor.
# (Modern API: plain tensors replace the deprecated Variable wrapper.)
x = torch.zeros(2, 2, requires_grad=True)
y = torch.ones(2, 2, requires_grad=True)

# Summing the 2x2 matrix reduces z to a scalar, so backward() needs no argument.
z = torch.sum(x + y)
print(z.data)
z.backward()
# dz/dx is a 2x2 matrix of ones (each element of x contributes 1 to the sum).
print(x.grad)

print("-------------------------------------")

# x = 2.0 as a 1-element float tensor; we differentiate z = (x + 2)^2 at x = 2.
x = torch.tensor([2.0], requires_grad=True)
z = (x + 2) ** 2
z.backward()
# dz/dx = 2 * (x + 2) = 8 at x = 2.
print(x.grad)
# Out: (program output shown in the original tutorial)