Gradient Descent Algorithm
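The model is a simple linear function y_hat = w * x. Batch gradient descent minimizes the mean squared error over all N training samples and updates w with the gradient averaged over the whole set; these two formulas are exactly what the cost() and gradient() functions below compute:

    cost(w)    = (1/N) * Σ (w * x_i - y_i)²
    ∂cost/∂w   = (1/N) * Σ 2 * x_i * (w * x_i - y_i)

Each epoch performs a single update w ← w - α * ∂cost/∂w with learning rate α = 0.01.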
import matplotlib.pyplot as plt

# Training data: y = 2 * x, so the optimal weight is w = 2.0
x_data = [1.0, 2.0, 3.0]
y_data = [2.0, 4.0, 6.0]

# Initial guess for the weight
w = 1.0

def forward(x):
    # Linear model: y_hat = w * x
    return w * x

def loss(x, y):
    # Squared error for a single sample
    y_pred = forward(x)
    return (y_pred - y) ** 2

def cost(xs, ys):
    # Mean squared error over the whole training set
    c = 0
    for x, y in zip(xs, ys):
        c += loss(x, y)
    return c / len(xs)

def gradient(xs, ys):
    # Analytic gradient of the cost with respect to w,
    # averaged over all samples: (1/N) * sum(2 * x * (x * w - y))
    grad = 0
    for x, y in zip(xs, ys):
        grad += 2 * x * (x * w - y)
    return grad / len(xs)

epoch_list = []
cost_list = []
for epoch in range(100):
    cost_val = cost(x_data, y_data)
    grad = gradient(x_data, y_data)
    w -= 0.01 * grad  # one gradient descent step per epoch, learning rate 0.01
    print("Epoch:", epoch, "w:", w, "cost:", cost_val)
    epoch_list.append(epoch)
    cost_list.append(cost_val)

print('Predict:', 4, forward(4))

plt.plot(epoch_list, cost_list)
plt.xlabel('epoch')
plt.ylabel('loss')
plt.show()
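As a quick sanity check (not part of the original notes), the analytic gradient can be compared against a central-difference estimate. cost_at below is a hypothetical helper that evaluates the cost at an arbitrary weight, so the trained global w is left untouched:

# Hypothetical helper: cost as an explicit function of the weight
def cost_at(w_val, xs, ys):
    return sum((w_val * x - y) ** 2 for x, y in zip(xs, ys)) / len(xs)

w0 = 1.0    # check the gradient at the initial weight
eps = 1e-6
numeric = (cost_at(w0 + eps, x_data, y_data) - cost_at(w0 - eps, x_data, y_data)) / (2 * eps)
analytic = sum(2 * x * (x * w0 - y) for x, y in zip(x_data, y_data)) / len(x_data)
print(numeric, analytic)    # both should be close to -28/3 ≈ -9.33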
Stochastic Gradient Descent: the loss is no longer averaged over the whole dataset
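In the stochastic version, w is updated after every individual sample rather than once per epoch, using the gradient of that single sample's loss:

    loss(w)    = (w * x - y)²
    ∂loss/∂w   = 2 * x * (w * x - y)

This is exactly what the gradient(x, y) function below returns.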
import matplotlib.pyplot as plt

# Same training data as above: y = 2 * x
x_data = [1.0, 2.0, 3.0]
y_data = [2.0, 4.0, 6.0]

w = 1.0

def forward(x):
    # Linear model: y_hat = w * x
    return w * x

def loss(x, y):
    # Squared error for a single sample
    y_pred = forward(x)
    return (y_pred - y) ** 2

def cost(xs, ys):
    # Mean squared error over the whole set (not used by the SGD loop below)
    c = 0
    for x, y in zip(xs, ys):
        c += loss(x, y)
    return c / len(xs)

def gradient(x, y):
    # Gradient of a single sample's loss with respect to w
    return 2 * x * (x * w - y)

epoch_list = []
loss_list = []
for epoch in range(100):
    # Update w once per sample instead of once per epoch
    for x, y in zip(x_data, y_data):
        loss_val = loss(x, y)
        grad = gradient(x, y)
        w -= 0.01 * grad
    print("Epoch:", epoch, "w:", w, "loss:", loss_val)
    epoch_list.append(epoch)
    loss_list.append(loss_val)

print('Predict:', 4, forward(4))

plt.plot(epoch_list, loss_list)
plt.ylabel('loss')
plt.xlabel('epoch')
plt.show()
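Since y_data is exactly 2 * x_data, both versions drive w toward the optimum w = 2.0, so the final prediction for x = 4 approaches 8.0. Note that the SGD curve plots the loss of the last sample seen in each epoch rather than the averaged cost, which is why it is labeled loss instead of cost.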