话不多说，直接上代码：
import numpy as np
import matplotlib.pylab as plt
def numerical_gradient(f, x):
    """Numerically estimate the gradient of ``f`` at ``x``.

    Uses the central difference (f(x+h) - f(x-h)) / 2h per coordinate.

    Args:
        f: Scalar-valued function taking the whole array ``x``.
        x: ndarray of any shape (converted to float; the caller's array
           is left unchanged after the call).

    Returns:
        ndarray of the same shape as ``x`` holding the gradient.
    """
    h = 1e-4
    # Work on a float view/copy: with an int array, ``x[idx] = val + h``
    # would truncate h away and the gradient would come out all zeros.
    x = np.asarray(x, dtype=float)
    grad = np.zeros_like(x)
    # nditer with multi_index generalizes the original flat loop to
    # arrays of any rank (the flat-index version broke on 2-D input).
    it = np.nditer(x, flags=['multi_index'])
    while not it.finished:
        idx = it.multi_index
        tmp = x[idx]
        x[idx] = tmp + h
        fxh1 = f(x)            # f(x + h) in this coordinate
        x[idx] = tmp - h
        fxh2 = f(x)            # f(x - h) in this coordinate
        grad[idx] = (fxh1 - fxh2) / (2 * h)
        x[idx] = tmp           # restore before moving to the next coord
        it.iternext()
    return grad
def gradient_descent(f, init_x, lr, step_num):
    """Minimize ``f`` by plain gradient descent.

    Args:
        f: Scalar-valued objective taking a 1-D ndarray.
        init_x: Starting point (not modified — a copy is taken).
        lr: Learning rate (step size).
        step_num: Number of update steps to run.

    Returns:
        Tuple ``(x, history)`` where ``x`` is the final point and
        ``history`` is a ``(step_num, init_x.size)`` array holding the
        point at the start of each step.
    """
    # Copy so ``x -= lr * grad`` cannot destroy the caller's init_x
    # (the original aliased it and mutated it in place).
    x = np.asarray(init_x, dtype=float).copy()
    history = np.empty((step_num, x.size))
    for i in range(step_num):
        history[i] = x  # assignment into the row copies the values
        grad = numerical_gradient(f, x)
        x -= lr * grad
    return x, history
def function_2(x):
    """Demo objective: the quadratic bowl f(x) = x0^2 + x1^2."""
    x0, x1 = x[0], x[1]
    return x0 * x0 + x1 * x1
if __name__ == '__main__':
    # Run 100 descent steps on the quadratic bowl from (-3, 4),
    # then scatter-plot the trajectory of iterates.
    start = np.array([-3.0, 4.0])
    final_x, trajectory = gradient_descent(function_2, start, 0.1, 100)
    print(final_x)
    print(trajectory)
    plt.plot(trajectory[:, 0], trajectory[:, 1], 'o')
    plt.xlim(-3.5, 3.5)
    plt.ylim(-4.5, 4.5)
    plt.xlabel('x0')
    plt.ylabel('x1')
    plt.show()
以下是可视化结果：
如图,可以清楚地看到梯度下降的过程。