PyTorch Basics: Understanding Autograd and Computation Graphs (paperspace.com)
import torch
# Four scalar weights that require gradients (leaf nodes) and one fixed input
w1 = torch.randn(1, requires_grad=True)
w2 = torch.randn(1, requires_grad=True)
w3 = torch.randn(1, requires_grad=True)
w4 = torch.randn(1, requires_grad=True)
a = torch.tensor(1.)
print("a = ", a.data)
print("w = ", w1.data, w2.data, w3.data, w4.data)
b = w1 * a
c = w2 * a
d = w3 * b + w4 * c  # forward pass builds the computation graph: d = w3*w1*a + w4*w2*a
print("d = ", d.data)
L = (10 - d).sum()  # loss function; .sum() reduces to a scalar so backward() needs no argument
print("L = ", L.data)
L.backward()  # backpropagate: fills in .grad for every leaf tensor
print("grad : ",w1.grad, w2.grad, w3.grad, w4.grad)
learning_rate = 1
# Updating through .data bypasses autograd, so the update step itself
# is not recorded in the graph (the idiomatic form is shown after this block)
w1.data = w1.data - learning_rate * w1.grad.data
w2.data = w2.data - learning_rate * w2.grad.data
w3.data = w3.data - learning_rate * w3.grad.data
w4.data = w4.data - learning_rate * w4.grad.data
print("w new = ", w1.data, w2.data, w3.data, w4.data)
# calculate: y = 1 / (exp(-(w * x + b)) + 1), i.e. the sigmoid of w*x + b
# at x = 1, w = 0, b = 0
import math
import torch
x = torch.tensor(1., requires_grad=True)
w = torch.tensor(0., requires_grad=True)
b = torch.tensor(0., requires_grad=True)
h1 = w * x
h2 = h1 + b
h3 = h2 * (-1)
# h4 = math.exp(h3)  # would convert the Tensor to a plain Python float, detaching it from the graph
h4 = torch.exp(h3)   # stays a Tensor, so autograd can track the op
h5 = h4 + 1
h6 = 1 / h5
y = h6               # y = sigmoid(w*x + b)
y.backward()
print(f'grad of x: {x.grad}')
print(f'grad of w: {w.grad}')
print(f'grad of b: {b.grad}')
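The printed values can be checked analytically: with y = sigmoid(w*x + b), dy/dw = x*y*(1-y), dy/db = y*(1-y), and dy/dx = w*y*(1-y). At x = 1, w = 0, b = 0 we have y = 0.5, so the gradients of w, b, and x should come out as 0.25, 0.25, and 0. A minimal sketch cross-checking the hand-built graph against the built-in torch.sigmoid (the variable names xc, wc, bc are mine, chosen to avoid clashing with the listing above):

# Cross-check: same function as the hand-built graph, one fused op.
xc = torch.tensor(1., requires_grad=True)
wc = torch.tensor(0., requires_grad=True)
bc = torch.tensor(0., requires_grad=True)
yc = torch.sigmoid(wc * xc + bc)
yc.backward()
# At this point y = 0.5, so y*(1-y) = 0.25
print(f'check w: {wc.grad}')  # expected 0.25 (= x * y * (1 - y))
print(f'check b: {bc.grad}')  # expected 0.25 (= y * (1 - y))
print(f'check x: {xc.grad}')  # expected 0.0  (= w * y * (1 - y))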