PyTorch 2: Variable
import torch
from torch.autograd import Variable
tensor = torch.FloatTensor([[1, 2], [3, 4]])
variable = Variable(tensor, requires_grad=True)
print(tensor) # tensor([[1., 2.], [3., 4.]])
print(variable) # tensor([[1., 2.], [3., 4.]], requires_grad=True); requires_grad=True enables gradient tracking, which backpropagation needs
t_out = torch.mean(tensor*tensor) # mean of x^2
v_out = torch.mean(variable*variable)
print(t_out) # tensor(7.5000); the Variable result carries gradient (graph) information that the plain tensor result lacks
print(v_out) # tensor(7.5000, grad_fn=<MeanBackward1>)
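# note (added remark, not in the original post): t_out was built from a plain tensor with no grad_fn,
# so t_out.backward() would raise a RuntimeError; only v_out can backpropagate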
v_out.backward() # backpropagation
# derivation: v_out = 1/4 * sum(variable*variable)
# d(v_out)/d(variable) = 1/4 * 2 * variable = variable/2
print(variable.grad) # tensor([[0.5000, 1.0000], [1.5000, 2.0000]])
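# sanity check (added, not in the original post): autograd's result matches the analytic gradient variable/2
print(torch.allclose(variable.grad, variable.data / 2)) # True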
print("variable:", variable) # tensor([[1., 2.], [3., 4.]], requires_grad=True)
print("variable.data:", variable.data) # tensor([[1., 2.], [3., 4.]])
print("variable.data.numpy:", variable.data.numpy()) # [[1. 2.] [3. 4.]]
END