# In[1]:
import numpy as np
import matplotlib.pyplot as plt
# In[2]:
# Sample 141 evenly spaced theta values on [-1, 6] (step 0.05)
# to draw the loss curve of J below.
plot_x = np.linspace(-1.0, 6.0, num=141)
plot_x
# In[3]:
# Loss values J(theta) = (theta - 2.5)^2 - 1 evaluated at every sample point.
shifted = plot_x - 2.5
plot_y = shifted ** 2 - 1
plot_y
# In[4]:
# Plot the loss curve. Defect fixed: the original figure had no title or
# axis labels, so it could not stand alone when the notebook is skimmed.
plt.plot(plot_x, plot_y)
plt.xlabel('theta')
plt.ylabel('J(theta)')
plt.title('J(theta) = (theta - 2.5)^2 - 1')
plt.show()
# In[5]:
def dJ(thetha):
    """Derivative of the loss J: dJ/d(theta) = 2 * (theta - 2.5).

    Note: the parameter name `thetha` is a misspelling of `theta`,
    kept as-is because callers may pass it by keyword.
    """
    return (thetha - 2.5) * 2
# In[6]:
def J(thetha):
    """Loss function J(theta) = (theta - 2.5)**2 - 1, minimized at theta = 2.5."""
    diff = thetha - 2.5
    return diff ** 2 - 1
# In[7]:
# Gradient descent on J starting from theta = 0 with learning rate 0.1.
# Stop once one step improves the loss by less than epsilon.
eta = 0.1
epsilon = 1e-8
thetha = 0.0
converged = False
while not converged:
    last_thetha = thetha
    gradient = dJ(last_thetha)
    thetha = last_thetha - eta * gradient
    converged = abs(J(thetha) - J(last_thetha)) < epsilon
print(thetha, J(thetha))
# In[8]:
# Same descent as above (eta = 0.1), but record every theta visited so the
# trajectory can be overlaid on the loss curve.
eta = 0.1
epsilon = 1e-8
thetha = 0.0
thetha_history = [thetha]
converged = False
while not converged:
    last_thetha = thetha
    gradient = dJ(last_thetha)
    thetha = last_thetha - eta * gradient
    thetha_history.append(thetha)
    converged = abs(J(thetha) - J(last_thetha)) < epsilon
# Convert once, then reuse for both coordinates of the trajectory plot.
history_arr = np.array(thetha_history)
plt.plot(plot_x, plot_y, color='b')
plt.plot(history_arr, J(history_arr), color='r', marker='+')
plt.show()
print(thetha, J(thetha))
# In[9]:
# Repeat the descent with a smaller learning rate (eta = 0.01): more,
# smaller steps along the curve before convergence.
eta = 0.01
epsilon = 1e-8
thetha = 0.0
thetha_history = [thetha]
converged = False
while not converged:
    last_thetha = thetha
    gradient = dJ(last_thetha)
    thetha = last_thetha - eta * gradient
    thetha_history.append(thetha)
    converged = abs(J(thetha) - J(last_thetha)) < epsilon
# Convert once, then reuse for both coordinates of the trajectory plot.
history_arr = np.array(thetha_history)
plt.plot(plot_x, plot_y, color='b')
plt.plot(history_arr, J(history_arr), color='r', marker='+')
plt.show()
print(thetha, J(thetha))
# Machine Learning — Gradient Descent Algorithm
# (original footer: 机器学习-梯度下降算法, 最新推荐文章于 2024-05-08 19:48:24 发布)