import numpy as np
import matplotlib.pyplot as plt
# Training set for the linear model y = 2x (optimal weight is w = 2.0).
x_data = [1.0, 2.0, 3.0]
y_data = [2.0, 4.0, 6.0]

# Initial weight guess; refined by gradient descent in the __main__ block.
w = 1
def forward(x, w):
    """Linear model prediction: return the estimate y_hat = x * w."""
    return w * x
def cost(xs, ys, w):
    """Mean squared error of the model y_hat = forward(x, w) over the dataset.

    Args:
        xs: sequence of input values.
        ys: sequence of target values, paired element-wise with ``xs``.
        w:  current model weight.

    Returns:
        (1/N) * sum((forward(x, w) - y) ** 2) over the N pairs.

    Raises:
        ZeroDivisionError: if ``xs`` is empty (no data to average over).
    """
    # The original accumulated into a local named `cost`, shadowing this
    # function's own name; summing a generator avoids the shadowing and
    # the manual accumulator entirely.
    total = sum((forward(x, w) - y) ** 2 for x, y in zip(xs, ys))
    return total / len(xs)
def gradient(xs, ys, w):
    """Analytic gradient of the MSE cost with respect to w, averaged over data.

    d/dw [ (1/N) * sum((x*w - y)^2) ] = (1/N) * sum( 2*x*(x*w - y) )
    """
    return sum(2 * x * (x * w - y) for x, y in zip(xs, ys)) / len(xs)
if __name__ == '__main__':
    # Sanity check before training: with w = 1 the model predicts 4 for x = 4.
    print("Predict (before training)", 4, forward(4, w))

    cost_list = []   # loss at each epoch, evaluated at the pre-update weight
    w_list = []      # weight at which that loss/gradient were evaluated
    grad_list = []   # gradient at each epoch, evaluated at the pre-update weight

    for epoch in range(100):
        cost_val = cost(x_data, y_data, w)
        grad_val = gradient(x_data, y_data, w)
        # BUGFIX: record w BEFORE the update. The original appended the
        # post-update weight, so the (w, loss) and (w, grad) pairs plotted
        # below were misaligned by one gradient step.
        w_list.append(w)
        w -= 0.01 * grad_val  # fixed learning rate of 0.01
        cost_list.append(cost_val)
        grad_list.append(grad_val)
        print('Epoch:', epoch + 1, " ", 'w=', w, " ", 'loss=', cost_val)

    print("Predict (after training)", 4, forward(4, w))

    # Loss as a function of the weight visited during training.
    plt.plot(w_list, cost_list)
    plt.xlabel("w")
    plt.ylabel("loss")
    plt.show()

    # Gradient as a function of the weight visited during training.
    plt.plot(w_list, grad_list)
    plt.xlabel("w")
    plt.ylabel("grad")
    plt.show()
# pytorch-03: Gradient Descent Algorithm
# First published 2022-07-19 18:09:49