PyTorch反向传播学习实例(p4)刘二
import torch

# Training data: samples of the line y = 2x.
x_da = [1.0, 2.0, 3.0]
y_da = [2.0, 4.0, 6.0]

# Learnable parameters of the quadratic model y_hat = w1*x^2 + w2*x + b.
w1 = torch.tensor([1.0], requires_grad=True)
w2 = torch.tensor([1.0], requires_grad=True)
b = torch.tensor([1.0], requires_grad=True)


def dors(x):
    """Forward pass: evaluate the quadratic model w1*x^2 + w2*x + b at x."""
    return w1 * x * x + w2 * x + b


def Loss(xs, ys):
    """Squared error between the model's prediction at xs and target ys."""
    return (dors(xs) - ys) ** 2


# Stochastic gradient descent: one parameter update per (x, y) sample,
# repeated for 100 epochs.
for epoch in range(100):
    for g, h in zip(x_da, y_da):
        l = Loss(g, h)      # forward pass — computed once, reused for backward
        print(l.item())     # scalar loss value (readable training trace)
        l.backward()        # accumulate d(loss)/d(param) into .grad of w1/w2/b
        # Update via .data so the step itself is not tracked by autograd;
        # use .grad.data consistently for all three parameters.
        w1.data = w1.data - 0.01 * w1.grad.data
        w2.data = w2.data - 0.01 * w2.grad.data
        b.data = b.data - 0.01 * b.grad.data
        # Zero the gradients, otherwise backward() keeps accumulating them.
        w1.grad.data.zero_()
        w2.grad.data.zero_()
        b.grad.data.zero_()

# Extrapolate to an unseen input after training.
print(dors(4))
```
![在这里插入图片描述](https://img-blog.csdnimg.cn/10a43d9734e94c46b9043d32a155628d.JPG?x-oss-process=image/watermark,type_ZHJvaWRzYW5zZmFsbGJhY2s,shadow_50,text_Q1NETiBAY29yYysr,size_20,color_FFFFFF,t_70,g_se,x_16#pic_center)
**调整 b 的初始值**
import torch

# Training data: samples of the line y = 2x.
x_da = [1.0, 2.0, 3.0]
y_da = [2.0, 4.0, 6.0]

# Same model as before, but b starts near zero (3e-9) instead of 1.0,
# to observe the effect of the bias initialization on training.
w1 = torch.tensor([1.0], requires_grad=True)
w2 = torch.tensor([1.0], requires_grad=True)
b = torch.tensor([0.000000003], requires_grad=True)


def dors(x):
    """Forward pass: evaluate the quadratic model w1*x^2 + w2*x + b at x."""
    return w1 * x * x + w2 * x + b


def Loss(xs, ys):
    """Squared error between the model's prediction at xs and target ys."""
    return (dors(xs) - ys) ** 2


# Stochastic gradient descent: one parameter update per (x, y) sample,
# repeated for 100 epochs.
for epoch in range(100):
    for g, h in zip(x_da, y_da):
        l = Loss(g, h)      # forward pass — computed once, reused for backward
        print(l.item())     # scalar loss value (readable training trace)
        l.backward()        # accumulate d(loss)/d(param) into .grad of w1/w2/b
        # Update via .data so the step itself is not tracked by autograd;
        # use .grad.data consistently for all three parameters.
        w1.data = w1.data - 0.01 * w1.grad.data
        w2.data = w2.data - 0.01 * w2.grad.data
        b.data = b.data - 0.01 * b.grad.data
        # Zero the gradients, otherwise backward() keeps accumulating them.
        w1.grad.data.zero_()
        w2.grad.data.zero_()
        b.grad.data.zero_()

# Extrapolate to an unseen input after training.
print(dors(4))
![通过对b调参,影响不大](https://img-blog.csdnimg.cn/a46188d111864eedb52064946eca39db.JPG?x-oss-process=image/watermark,type_ZHJvaWRzYW5zZmFsbGJhY2s,shadow_50,text_Q1NETiBAY29yYysr,size_20,color_FFFFFF,t_70,g_se,x_16#pic_center)