After finishing Lecture 3, I typed out the code from class by hand. It is attached below for future reference and study:
1. Stochastic Gradient Descent (SGD)
import matplotlib.pyplot as plt

x_data = [1.0, 2.0, 3.0]
y_data = [2.0, 4.0, 6.0]

epoch_list = []
loss_list = []
w = 1.0  # initial guess for the weight

def forward(x):
    # linear model: y_hat = x * w
    return x * w

def loss(x, y):
    # squared error for a single sample
    y_pred = forward(x)
    return (y_pred - y) ** 2

def gradient(x, y):
    # d(loss)/dw = d/dw (x*w - y)^2 = 2 * x * (x*w - y)
    return 2 * x * (x * w - y)

print('Predict (before training)', 4, forward(4))
for epoch in range(100):
    for x, y in zip(x_data, y_data):
        grad = gradient(x, y)
        w = w - 0.01 * grad  # update w after every sample (this is what makes it stochastic)
        print("\tgrad:", x, y, grad)
        loss_val = loss(x, y)
    print("progress:", epoch, "w=", w, "loss=", loss_val)
    epoch_list.append(epoch)
    loss_list.append(loss_val)  # loss of the last sample seen in this epoch
print('Predict (after training)', 4, forward(4))

plt.plot(epoch_list, loss_list)
plt.ylabel('loss')
plt.xlabel('epoch')
plt.show()
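For comparison, a common middle ground between the per-sample update above and the full-batch update in the next section is mini-batch gradient descent: shuffle the data each epoch and average the gradient over a few samples at a time. The sketch below is my own addition rather than course code, and the batch_size of 2 is an assumed value chosen only for illustration:

import random

x_data = [1.0, 2.0, 3.0]
y_data = [2.0, 4.0, 6.0]
w = 1.0
batch_size = 2  # assumed value, chosen only for illustration

def forward(x):
    return x * w

for epoch in range(100):
    # visit the samples in a fresh random order each epoch
    indices = list(range(len(x_data)))
    random.shuffle(indices)
    for start in range(0, len(indices), batch_size):
        batch = indices[start:start + batch_size]
        # average the per-sample gradients 2 * x * (x*w - y) over the batch
        grad = sum(2 * x_data[i] * (x_data[i] * w - y_data[i]) for i in batch) / len(batch)
        w = w - 0.01 * grad

print('Predict (after training)', 4, forward(4))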
2. (Batch) Gradient Descent
import matplotlib.pyplot as plt

x_data = [1.0, 2.0, 3.0]
y_data = [2.0, 4.0, 6.0]

epoch_list = []
cost_list = []
w = 1.0  # initial guess for the weight

def forward(x):
    # linear model: y_hat = x * w
    return x * w

def cost(xs, ys):
    # mean squared error over the whole training set
    total = 0
    for x, y in zip(xs, ys):
        y_pred = forward(x)
        total += (y_pred - y) ** 2
    return total / len(xs)

def gradient(xs, ys):
    # d(cost)/dw = (1/N) * sum over all samples of 2 * x * (x*w - y)
    grad = 0
    for x, y in zip(xs, ys):
        grad += 2 * x * (x * w - y)
    return grad / len(xs)

print('Predict (before training)', 4, forward(4))
for epoch in range(100):
    cost_val = cost(x_data, y_data)
    grad_val = gradient(x_data, y_data)
    w = w - 0.01 * grad_val  # one update per epoch, using the full-batch gradient
    print("Epoch:", epoch, 'w=', w, 'loss=', cost_val)
    epoch_list.append(epoch)
    cost_list.append(cost_val)
print('Predict (after training)', 4, forward(4))

plt.plot(epoch_list, cost_list)
plt.ylabel('cost')
plt.xlabel('epoch')
plt.show()
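As a side note, the full-batch version lends itself to vectorization: all three samples can be pushed through the model at once, so the explicit Python loops disappear. This is a minimal NumPy sketch of the same computation, assuming NumPy is available; it is my own addition, not the course code:

import numpy as np

x = np.array([1.0, 2.0, 3.0])
y = np.array([2.0, 4.0, 6.0])
w = 1.0

for epoch in range(100):
    y_pred = x * w                         # forward pass on all samples at once
    cost_val = np.mean((y_pred - y) ** 2)  # mean squared error over the set
    grad = np.mean(2 * x * (y_pred - y))   # same gradient formula, averaged by np.mean
    w -= 0.01 * grad

print('Predict (after training)', 4, 4 * w)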