import numpy as np
from matplotlib import pyplot as plt
# Global record of every theta visited by gradient_descent; the plotting
# code in the __main__ block reads it back to overlay the descent path.
theta_history = []
# Loss function
def J(theta):
    """Loss function J(theta) = (theta - 2.5)**2 - 1.

    Works element-wise on numpy arrays as well as on scalars.

    Returns:
        The loss value, or +inf if the arithmetic overflows (e.g. theta
        diverged because the learning rate was too large).
    """
    try:
        return (theta - 2.5)**2 - 1.0
    except OverflowError:
        # Narrowed from a bare `except:`: only overflow of a huge float
        # is a legitimate failure here; anything else should surface.
        return float('inf')
# Derivative
def dJ(theta):
    """Gradient of J: d/dtheta [(theta - 2.5)**2 - 1] = 2 * (theta - 2.5)."""
    return (theta - 2.5) * 2
def gradient_descent(initial_theta, eta, n_iters=1e4, epsilon=1e-8):
    """Minimize J by vanilla gradient descent starting from ``initial_theta``.

    Args:
        initial_theta: starting value of theta.
        eta: learning rate (step size).
        n_iters: maximum number of iterations (safety cap against divergence).
        epsilon: convergence threshold on the change in loss between steps.

    Side effects:
        Appends every visited theta to the module-level ``theta_history``.

    Returns:
        The final theta reached.  (The original returned None, forcing the
        caller to dig the answer out of ``theta_history``; returning it is
        backward-compatible and makes the result directly usable.)
    """
    theta = initial_theta
    i_iters = 0
    while i_iters < n_iters:
        gradient = dJ(theta)
        last_theta = theta
        theta = theta - gradient * eta
        theta_history.append(theta)
        # Converged: the loss barely changed between consecutive steps.
        if abs(J(theta) - J(last_theta)) < epsilon:
            break
        i_iters += 1
    return theta
if __name__ == '__main__':
    # Loss curve over the interval [-1, 6] for visual reference.
    plot_x = np.linspace(-1, 6, 150)
    plot_y_ = (plot_x - 2.5) ** 2 - 1
    theta = 0.0
    eta = 0.1
    gradient_descent(theta, eta, n_iters=100)
    plt.plot(plot_x, plot_y_)
    # Overlay every theta visited by the descent on the loss curve.
    plt.plot(np.array(theta_history), J(np.array(theta_history)), color="r", marker='+')
    plt.show()
    # BUG FIX: the original `print(theta)` printed 0.0 — gradient_descent
    # does not mutate its argument and (in the original) returned None.
    # Print the last theta actually reached instead.
    print(theta_history[-1] if theta_history else theta)
# day04 - Machine Learning - Gradient Descent
# (blog footer: latest recommended article published 2024-10-12 12:26:23)