Reference: PyTorch Tutorial 03 - Gradient Calculation With Autograd
Three ways:
# x.requires_grad_(False)
# x.detach()
# with torch.no_grad():
'''
prevent PyTorch from tracking history and calculating gradients
'''
import torch
torch.manual_seed(seed=20200910)
x = torch.randn(3, requires_grad=True)
print(x) # tensor([ 0.2824, -0.3715, 0.9088], requires_grad=True)
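# For context (a minimal sketch added here, not from the original notes):
# with requires_grad=True, each operation on x records a grad_fn, and
# calling backward() on a scalar result accumulates gradients into x.grad.
y = (x * 2).sum()
print(y)       # tensor(..., grad_fn=<SumBackward0>)
y.backward()
print(x.grad)  # d/dx of sum(2*x) is 2 per element: tensor([2., 2., 2.])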
# Method 2: x.detach() returns a new tensor that shares the same data
# but is detached from the computation graph; x itself is unchanged
y = x.detach()
print(y)  # tensor([ 0.2824, -0.3715,  0.9088])

# Method 3: operations inside torch.no_grad() are not tracked
with torch.no_grad():
    y = x + 2
    print(y)  # not tracked: tensor([2.2824, 1.6285, 2.9088])
z = x + 2
print(z)  # tracked: tensor([2.2824, 1.6285, 2.9088], grad_fn=<AddBackward0>)

# Method 1: requires_grad_(False) turns off tracking in place; it runs
# last here because it permanently changes x
x.requires_grad_(False)
print(x)  # tensor([ 0.2824, -0.3715,  0.9088])
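In practice these matter most in training loops: the parameter update itself must not be tracked by autograd, and accumulated gradients must be cleared after each step. A minimal sketch of that pattern (illustrative toy values, optimizer omitted; not part of the original notes):

weights = torch.ones(4, requires_grad=True)
for epoch in range(3):
    model_output = (weights * 3).sum()
    model_output.backward()        # gradients accumulate into weights.grad
    with torch.no_grad():          # the update itself must not be tracked
        weights -= 0.1 * weights.grad
    weights.grad.zero_()           # reset, or gradients pile up across epochs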