"""
torch.autograd.backward(
tensor, 用于求导的张量
grad_tensors, 多梯度权重
retain_graph,保存计算图
create_graph 创建导数计算图,用于高阶求导
)"""
# y = (x + w) * (w + 1)
# a = x + w
# b = w + 1
# y = a * b
w = torch.tensor([1.], requires_grad=True)
x = torch.tensor([2.], requires_grad=True)
a = torch.add(w, x)
b = torch.add(w, 1)
y = torch.mul(a, b)

y.backward()
print(w.grad)  # tensor([5.])  dy/dw = b + a = (w+1) + (x+w) = 2 + 3
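# Equivalent call form: Tensor.backward() simply forwards to
# torch.autograd.backward(), so the same gradient can be produced as follows
# (a small check, repeated here only for illustration).
w = torch.tensor([1.], requires_grad=True)
x = torch.tensor([2.], requires_grad=True)
y = torch.mul(torch.add(w, x), torch.add(w, 1))
torch.autograd.backward(y)  # same effect as y.backward()
print(w.grad)               # tensor([5.]) again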
# Same graph, but with two outputs stacked into one tensor; grad_tensors
# supplies a weight for each output's gradient.
w = torch.tensor([1.], requires_grad=True)
x = torch.tensor([2.], requires_grad=True)
a = torch.add(w, x)
b = torch.add(w, 1)
y0 = torch.mul(a, b)
y1 = torch.add(a, b)
loss = torch.cat([y0, y1], dim=0)
grad_t = torch.tensor([1., 1.])  # one gradient weight per output

loss.backward(gradient=grad_t)
print(w.grad)  # tensor([7.])  1*dy0/dw + 1*dy1/dw = 1*5 + 1*2
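# A follow-up sketch with non-unit weights (values chosen for illustration):
# grad_tensors scales each output's contribution, so w.grad becomes the
# weighted sum 1*5 + 2*2.
w = torch.tensor([1.], requires_grad=True)
x = torch.tensor([2.], requires_grad=True)
a = torch.add(w, x)
b = torch.add(w, 1)
loss = torch.cat([torch.mul(a, b), torch.add(a, b)], dim=0)
loss.backward(gradient=torch.tensor([1., 2.]))
print(w.grad)  # tensor([9.])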