import torch

flag = True
# flag = False
if flag:
    w = torch.tensor([1.], requires_grad=True)
    x = torch.tensor([2.], requires_grad=True)

    a = torch.add(w, x)
    b = torch.add(w, 1)
    y = torch.mul(a, b)             # y = (x + w) * (w + 1)

    y.backward(retain_graph=True)   # keep the graph so backward() can run again
    print(w.grad)
    y.backward()                    # the graph is freed after this call
    print(w.grad)
Output:
tensor([5.])
tensor([10.])

The first call prints dy/dw = (w + 1) + (x + w) = 5. Gradients accumulate in w.grad, so the second backward() adds another 5, giving 10.
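Two related points, shown in a minimal sketch below (not from the original text): without retain_graph=True the first backward() frees the graph and a second call raises a RuntimeError; and to get a fresh gradient instead of an accumulated one, zero w.grad between calls.

import torch

w = torch.tensor([1.], requires_grad=True)
x = torch.tensor([2.], requires_grad=True)
y = (w + x) * (w + 1)

y.backward(retain_graph=True)   # graph kept alive for a second pass
print(w.grad)                   # tensor([5.])

w.grad.zero_()                  # clear the accumulated gradient
y.backward()                    # second (final) pass; graph is freed afterwards
print(w.grad)                   # tensor([5.]) again, since we zeroed first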
1.2 grad_tensors
Purpose: set the weights used to combine multiple gradients.
flag = True
# flag = False
if flag:
    w = torch.tensor([1.], requires_grad=True)
    x = torch.tensor([2.], requires_grad=True)

    a = torch.add(w, x)    # retain_grad()
    b = torch.add(w, 1)

    y0 = torch.mul(a, b)   # y0 = (x + w) * (w + 1)    dy0/dw = 5
    y1 = torch.add(a, b)   # y1 = (x + w) + (w + 1)    dy1/dw = 2

    loss = torch.cat([y0, y1], dim=0)       # [y0, y1]
    grad_tensors = torch.tensor([1., 2.])

    loss.backward(gradient=grad_tensors)    # gradient is forwarded to grad_tensors in torch.autograd.backward()
    print(w.grad)
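The weights scale each gradient before they are summed: w.grad = 1 * dy0/dw + 2 * dy1/dw = 1 * 5 + 2 * 2 = 9, so the print should show tensor([9.]). As a hedged sketch (this rewrite is mine, not from the original), the same computation can be expressed through torch.autograd.backward directly, which is what Tensor.backward(gradient=...) wraps:

import torch

w = torch.tensor([1.], requires_grad=True)
x = torch.tensor([2.], requires_grad=True)
y0 = (w + x) * (w + 1)    # dy0/dw = 5
y1 = (w + x) + (w + 1)    # dy1/dw = 2

# grad_tensors supplies one weight per output tensor
torch.autograd.backward([y0, y1],
                        grad_tensors=[torch.tensor([1.]), torch.tensor([2.])])
print(w.grad)             # tensor([9.]) = 1*5 + 2*2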