Getting Started with PyTorch: Backpropagation

Backpropagation
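The script below fits a linear model y_hat = x * w to data drawn from y = 2x. For one sample the loss is l = (x*w - y)^2, so the gradient that l.backward() writes into w.grad is dl/dw = 2*x*(x*w - y). With the initial w = 1.0 and the first sample (x = 1, y = 2) this gives 2*1*(1 - 2) = -2, which is exactly the first grad line printed below.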
import torch

x_data = [1.0, 2.0, 3.0]
y_data = [2.0, 4.0, 6.0]

w = torch.Tensor([1.0])   # initial guess for the weight
w.requires_grad = True    # ask autograd to track gradients for w

def forward(x):
    return x * w                 # linear model: y_hat = x * w

def loss(x, y):
    y_pred = forward(x)
    return (y_pred - y) ** 2     # squared error for a single sample

print("predict (before training)", 4, forward(4).item())

for epoch in range(10):
    for x, y in zip(x_data, y_data):
        l = loss(x, y)
        l.backward()                             # backward pass: compute dl/dw into w.grad
        print('\tgrad:', x, y, w.grad.item())
        w.data = w.data - 0.01 * w.grad.data     # SGD update on .data, outside the autograd graph
        w.grad.data.zero_()                      # zero the gradient, otherwise backward() accumulates
    print("progress:", epoch, l.item())

print("predict (after training)", 4, forward(4).item())
Output:
predict (before training) 4 4.0
grad: 1.0 2.0 -2.0
grad: 2.0 4.0 -7.840000152587891
grad: 3.0 6.0 -16.228801727294922
progress: 0 7.315943717956543
grad: 1.0 2.0 -1.478623867034912
grad: 2.0 4.0 -5.796205520629883
grad: 3.0 6.0 -11.998146057128906
progress: 1 3.9987640380859375
grad: 1.0 2.0 -1.0931644439697266
grad: 2.0 4.0 -4.285204887390137
grad: 3.0 6.0 -8.870372772216797
progress: 2 2.1856532096862793
grad: 1.0 2.0 -0.8081896305084229
grad: 2.0 4.0 -3.1681032180786133
grad: 3.0 6.0 -6.557973861694336
progress: 3 1.1946394443511963
grad: 1.0 2.0 -0.5975041389465332
grad: 2.0 4.0 -2.3422164916992188
grad: 3.0 6.0 -4.848389625549316
progress: 4 0.6529689431190491
grad: 1.0 2.0 -0.4417421817779541
grad: 2.0 4.0 -1.7316293716430664
grad: 3.0 6.0 -3.58447265625
progress: 5 0.35690122842788696
grad: 1.0 2.0 -0.3265852928161621
grad: 2.0 4.0 -1.2802143096923828
grad: 3.0 6.0 -2.650045394897461
progress: 6 0.195076122879982
grad: 1.0 2.0 -0.24144840240478516
grad: 2.0 4.0 -0.9464778900146484
grad: 3.0 6.0 -1.9592113494873047
progress: 7 0.10662525147199631
grad: 1.0 2.0 -0.17850565910339355
grad: 2.0 4.0 -0.699742317199707
grad: 3.0 6.0 -1.4484672546386719
progress: 8 0.0582793727517128
grad: 1.0 2.0 -0.1319713592529297
grad: 2.0 4.0 -0.5173273086547852
grad: 3.0 6.0 -1.070866584777832
progress: 9 0.03185431286692619
predict (after training) 4 7.804864406585693
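After 10 epochs w has moved close to, but not exactly onto, the true value 2.0, so forward(4) returns about 7.80 rather than the ideal 8.0; more epochs would close the gap. As a quick sanity check of the printed gradients against the analytic formula above, a minimal sketch:

import torch

w = torch.tensor([1.0], requires_grad=True)
x, y = 1.0, 2.0

l = (x * w - y) ** 2
l.backward()

analytic = 2 * x * (x * w.item() - y)    # dl/dw = 2x(xw - y)
print(w.grad.item(), analytic)           # both print -2.0, matching the first grad line above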