Buggy code
The cause of the error is that
y1 = 0.5*x*2 - 1.2*x
y2 = x**3
are computed outside the loop, so they are never recomputed as x is updated by the optimizer.
import torch
import torch.optim as optim

torch.autograd.set_detect_anomaly(True)

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

x = torch.tensor([1.0, 2.0, 3.0, 4.5], dtype=torch.float32, requires_grad=True, device=device)
y_GT = torch.tensor([10, -20, -30, 45], dtype=torch.float32, device=device)
print(f'x: {x}')

optimizer = optim.Adam([x], lr=1)

# BUG: y1 and y2 are computed once, outside the loop, so their graph is
# built only once and never reflects the updated values of x.
y1 = 0.5*x*2 - 1.2*x
y2 = x**3

for i in range(10):
    print(f'{i}: x = {x}')
    optimizer.zero_grad()
    loss = (y1 + y2 - y_GT).mean()
    loss.backward()   # frees the graph of y1/y2 after the first iteration
    optimizer.step()
    print(f'{i}: x = {x}')
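Running this typically fails on the second iteration with a RuntimeError like "Trying to backward through the graph a second time": loss.backward() frees the intermediate buffers of the graph that produced y1 and y2, and that graph is never rebuilt. Even passing retain_graph=True would not give the intended behavior, because y1 and y2 would still be stale values computed from the initial x.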
Correct code
import torch
import torch.optim as optim

torch.autograd.set_detect_anomaly(True)

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

x = torch.tensor([1.0, 2.0, 3.0, 4.5], dtype=torch.float32, requires_grad=True, device=device)
y_GT = torch.tensor([10, -20, -30, 45], dtype=torch.float32, device=device)
print(f'x: {x}')

optimizer = optim.Adam([x], lr=1)

for i in range(10):
    print(f'{i}: x = {x}')
    optimizer.zero_grad()
    # FIX: recompute the forward pass inside the loop so a fresh graph
    # is built from the current value of x on every iteration.
    y1 = 0.5*x*2 - 1.2*x
    y2 = x**3
    loss = (y1 + y2 - y_GT).mean()
    loss.backward()
    optimizer.step()
    print(f'{i}: x = {x}')
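A pattern that makes this mistake harder to repeat is to wrap the forward computation in a function and call it inside the loop, so the graph is necessarily rebuilt from the current x each iteration. This is a minimal sketch of that idea using the same formulas as above; the helper name forward is my own choice, not part of the original code.

import torch
import torch.optim as optim

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

def forward(x):
    # Rebuilds the computational graph from the current x on every call.
    y1 = 0.5*x*2 - 1.2*x
    y2 = x**3
    return y1 + y2

x = torch.tensor([1.0, 2.0, 3.0, 4.5], dtype=torch.float32, requires_grad=True, device=device)
y_GT = torch.tensor([10, -20, -30, 45], dtype=torch.float32, device=device)
optimizer = optim.Adam([x], lr=1)

for i in range(10):
    optimizer.zero_grad()
    loss = (forward(x) - y_GT).mean()
    loss.backward()
    optimizer.step()

As a side note, (y1 + y2 - y_GT).mean() has no lower bound, so the optimizer can drive it arbitrarily negative; for an actual fitting problem one would usually minimize a squared error such as ((y1 + y2 - y_GT)**2).mean().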