import matplotlib.pyplot as plt

# Training data for the linear model y = x * w + b
x_data = [1.0, 2.0, 3.0]
y_data = [2.0, 4.0, 6.0]


def forward(x):
    # Linear model prediction
    return x * w + b


def grad_get(x, y):
    # Gradients of the squared loss (forward(x) - y) ** 2
    # with respect to w and b for a single sample
    grad_w = 2 * x * (forward(x) - y)
    grad_b = 2 * (forward(x) - y)
    return grad_w, grad_b


def loss(x, y):
    # Squared loss for a single sample
    return (forward(x) - y) ** 2


def cost(xs, ys):
    # Mean squared error over the whole data set
    total = 0
    for x, y in zip(xs, ys):
        total += (forward(x) - y) ** 2
    return total / len(xs)


# Initial parameters and learning rate
w = 4.0
b = 1.0
alf = 0.02

epoch_list = []
loss_list = []

for epoch in range(100):
    # Stochastic gradient descent: update w and b after every sample
    for x, y in zip(x_data, y_data):
        grad_w, grad_b = grad_get(x, y)
        w -= alf * grad_w
        b -= alf * grad_b
        l = loss(x, y)
    print("epoch=", epoch, "w=", w, "b=", b, "loss=", l)
    epoch_list.append(epoch)
    loss_list.append(cost(x_data, y_data))

plt.plot(epoch_list, loss_list)
plt.title("cost per epoch")
plt.xlabel("epoch")
plt.ylabel("cost")
plt.show()
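For reference, the update rule that the inner loop implements follows directly from differentiating the per-sample squared loss used in `loss` and `grad_get`:

$$\ell(w, b) = (xw + b - y)^2$$

$$\frac{\partial \ell}{\partial w} = 2x\,(xw + b - y), \qquad \frac{\partial \ell}{\partial b} = 2\,(xw + b - y)$$

$$w \leftarrow w - \alpha \frac{\partial \ell}{\partial w}, \qquad b \leftarrow b - \alpha \frac{\partial \ell}{\partial b}$$

where $\alpha$ is the learning rate `alf`. Because the parameters are updated after every single sample rather than once per full pass over the data, this is stochastic gradient descent rather than batch gradient descent.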
Lecture 3, Liu Er (刘二大人): Stochastic Gradient Descent (random_gradientDescent)