# Gradient descent algorithm: fit linear model y = w * x to the data below.
x_data = [1.0, 2.0, 3.0]
y_data = [1.5, 3.0, 4.5]
# Model parameter (initial guess; data above satisfies y = 1.5 * x exactly)
w = 1.0
# Learning rate (gradient-descent step size)
lr = 0.01
def forward(x):
    """Predict the output for input ``x`` using the current linear model w * x."""
    return w * x
def cost(xs, ys):
    """Return the total squared error of the model over the dataset.

    Args:
        xs: input values.
        ys: target values, paired element-wise with ``xs``.

    Returns:
        Sum of (forward(x) - y) ** 2 over all pairs. NOTE: this is the
        un-averaged sum (gradient() below averages by len(xs); this does not).
    """
    # sum() over a generator keeps the accumulation in one expression and
    # avoids shadowing the function's own name with a local variable,
    # as the original loop accumulator did.
    return sum((forward(x) - y) ** 2 for x, y in zip(xs, ys))
def gradient(xs, ys):
    """Return the mean gradient of the squared error with respect to ``w``.

    Uses d/dw (w*x - y)^2 = 2 * x * (w*x - y), averaged over all samples.
    """
    per_sample = (2 * xi * (forward(xi) - yi) for xi, yi in zip(xs, ys))
    return sum(per_sample) / len(xs)
# Training loop: plain batch gradient descent on w, stepping by lr each epoch.
for epoch in range(100000):
    # NOTE: the original had a bare triple-quoted string here ("lr is the
    # learning rate, i.e. the step size") — a no-op expression evaluated on
    # every iteration; replaced with a real comment.
    cost_val = cost(x_data, y_data)
    grad_val = gradient(x_data, y_data)
    w -= lr * grad_val
    print("Epoch:", epoch, ' w=', w, ' loss=', cost_val)
# Fixed typo in the original message: "traning" -> "training".
print("Predict (after training) ", 100, forward(100))