Lecture 4: Backpropagation
Linear model: y = x * w
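With the squared-error loss l = (y_pred - y)^2, the chain rule gives the gradient autograd will compute below: dl/dw = 2 * x * (x*w - y). At the first training sample (x = 1, y = 2, w = 1) this is 2 * 1 * (1 - 2) = -2, which matches the first grad value printed by the loop.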
import torch

x_data = [1.0, 2.0, 3.0]
y_data = [2.0, 4.0, 6.0]

w = torch.tensor([1.0])
w.requires_grad = True  # track operations on w so autograd can compute dl/dw

def forward(x):
    return x * w

def loss(x, y):
    y_pred = forward(x)
    return (y - y_pred) ** 2

print('predict (before training)', 4, forward(4).item())
for epoch in range(100):
    for x, y in zip(x_data, y_data):
        l = loss(x, y)
        l.backward()                           # backward pass: fills w.grad
        print('\tgrad:', x, y, w.grad.item())
        w.data = w.data - 0.01 * w.grad.data   # update the weight without building a graph
        w.grad.data.zero_()                    # backward() accumulates grad, so reset it after each update
    print('progress:', epoch, l.item())
print('predict (after training)', 4, forward(4).item())
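Mutating w.data and w.grad.data works, but it sidesteps autograd's bookkeeping. Below is a minimal sketch of the same loop in the style current PyTorch favors, using the torch.no_grad() context for the update and the in-place w.grad.zero_() (same data and learning rate as above; this rewrite is mine, not the lecture's code):

import torch

x_data = [1.0, 2.0, 3.0]
y_data = [2.0, 4.0, 6.0]
w = torch.tensor([1.0], requires_grad=True)

for epoch in range(100):
    for x, y in zip(x_data, y_data):
        l = (y - x * w) ** 2       # forward pass builds the graph
        l.backward()               # backward pass fills w.grad
        with torch.no_grad():      # the update itself must not be recorded in the graph
            w -= 0.01 * w.grad
        w.grad.zero_()             # clear the accumulated gradient

print('w after training:', w.item())  # should approach 2.0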
Quadratic model: y = w1 * x^2 + w2 * x + b
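By the same chain rule, the gradients for the three parameters are dl/dw1 = 2x^2(y_pred - y), dl/dw2 = 2x(y_pred - y), and dl/db = 2(y_pred - y), where y_pred = w1*x^2 + w2*x + b; these are exactly the values backward() deposits in w1.grad, w2.grad, and b.grad below.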
import torch

x_data = [1.0, 2.0, 3.0]
y_data = [2.0, 4.0, 6.0]

w1 = torch.tensor([1.0])
w2 = torch.tensor([1.0])
b = torch.tensor([-2.0])
w1.requires_grad = True
w2.requires_grad = True
b.requires_grad = True

def forward(x):
    return x * x * w1 + w2 * x + b

def loss(x, y):
    y_pred = forward(x)
    return (y - y_pred) ** 2

print('predict (before training)', 4, forward(4).item())
for epoch in range(100):
    for x, y in zip(x_data, y_data):
        l = loss(x, y)
        l.backward()                              # fills w1.grad, w2.grad, and b.grad
        print('\tgrad:', x, y, w1.grad.item(), w2.grad.item(), b.grad.item())
        w1.data = w1.data - 0.01 * w1.grad.data
        w2.data = w2.data - 0.01 * w2.grad.data
        b.data = b.data - 0.05 * b.grad.data      # the bias uses a larger learning rate here
        w1.grad.data.zero_()                      # backward() accumulates grad, so reset it after each update
        w2.grad.data.zero_()
        b.grad.data.zero_()
    print('progress:', epoch, l.item())
print('predict (after training)', 4, forward(4).item())
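A quick way to convince yourself that backward() computes correct gradients is to compare against a central finite-difference estimate. A small sketch for dl/dw1 at a single data point (the point and the epsilon are illustrative choices, not from the lecture):

import torch

x, y = 1.0, 2.0
w1 = torch.tensor([1.0], requires_grad=True)
w2 = torch.tensor([1.0], requires_grad=True)
b = torch.tensor([-2.0], requires_grad=True)

def loss_fn(w1_v, w2_v, b_v):
    y_pred = x * x * w1_v + w2_v * x + b_v
    return (y - y_pred) ** 2

loss_fn(w1, w2, b).backward()    # autograd's gradient ends up in w1.grad

eps = 1e-4                       # central difference: (l(w1+eps) - l(w1-eps)) / (2*eps)
plus = loss_fn(w1.detach() + eps, w2.detach(), b.detach())
minus = loss_fn(w1.detach() - eps, w2.detach(), b.detach())
numeric = (plus - minus) / (2 * eps)

print('autograd:', w1.grad.item(), 'numeric:', numeric.item())  # both should be about -4.0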