import torch
from torch.autograd import Variable
# Demo: computing gradients with autograd on a 2x2 tensor.
# NOTE: torch.autograd.Variable is deprecated since PyTorch 0.4 — a plain
# tensor with requires_grad=True provides the same functionality, and
# Variable(...) merely returns a Tensor nowadays.
tensor = torch.FloatTensor([[1, 2], [3, 4]])
# Clone so the gradient-tracking tensor is independent of `tensor`
# (mirrors the original Variable(tensor, requires_grad=True) behavior).
variable = tensor.clone().requires_grad_(True)
print(tensor)
print(variable)
print(tensor * tensor)
t_out = torch.mean(tensor * tensor)  # mean of elementwise x^2 -> scalar 7.5
v_out = torch.mean(variable * variable)  # same value, but tracked by autograd
print(t_out)
print(v_out)
v_out.backward()  # populates variable.grad
# v_out = 1/4 * sum(var * var)
# d(v_out)/d(var) = 1/4 * 2 * var = var / 2
print("\nvariable.grad:", variable.grad)
# .data is also legacy API; variable.detach() is the modern equivalent.
print("\nvariable.data:", variable.data)
print(variable.data.numpy())
# Expected output:
# tensor([[ 1., 2.],
#         [ 3., 4.]])
# tensor([[ 1., 2.],
#         [ 3., 4.]])
# tensor([[ 1., 4.],
#         [ 9., 16.]])
# tensor(7.5000)
# tensor(7.5000)
# variable.grad: tensor([[ 0.5000, 1.0000],
#                        [ 1.5000, 2.0000]])
# variable.data: tensor([[ 1., 2.],
#                        [ 3., 4.]])
# [[1. 2.]
#  [3. 4.]]