# The underlying details are the same as the previous post; this one just adds a 3D plot.
#梯度下降 == 导数值下降
import matplotlib.pyplot as plt
import numpy as np
from mpl_toolkits.mplot3d import Axes3D
#梯度下降算法是一个方法,是帮助我们找极值点的方法
def targetFunc(x1, x2):
    """Objective function f(x1, x2) = (x1 + x2) ** 2.

    Works element-wise on NumPy arrays as well as on scalars.
    """
    total = x1 + x2
    return total ** 2
def gradientFunc(x1, x2):
    """Partial derivative of targetFunc with respect to x1.

    For f(x1, x2) = (x1 + x2)**2, d f / d x1 = 2 * (x1 + x2).

    BUG FIX: the original returned x1 + (2*x1*x2)**0.5, which is not the
    gradient of the objective and fails (complex/NaN result) whenever
    x1*x2 < 0.
    """
    return 2 * (x1 + x2)
def gradientFunc2(x1, x2):
    """Partial derivative of targetFunc with respect to x2.

    For f(x1, x2) = (x1 + x2)**2, d f / d x2 = 2 * (x1 + x2).

    BUG FIX: the original returned x2 + (2*x1*x2)**0.5, which is not the
    gradient of the objective and fails (complex/NaN result) whenever
    x1*x2 < 0.
    """
    return 2 * (x1 + x2)
# Trail of (x1, x2) points visited during the descent.
# NOTE(review): module-level, so it accumulates across repeated gradientCal calls.
listx = []


def gradientCal(initX1, initX2, targetFunc, gradientFunc, rating=0.1, tolent=0.000001, times=500000):
    """Two-variable gradient descent: repeatedly step against the gradient
    until the objective stops changing.

    Parameters:
        initX1, initX2: initial guess for the minimizer.
        targetFunc:     objective f(x1, x2).
        gradientFunc:   partial derivative of f w.r.t. x1.
        rating:         step size (learning rate).
        tolent:         convergence tolerance on |f_old - f_new|.
        times:          iteration cap to guarantee termination.

    Returns:
        (x1, x2) — the last accepted point.

    NOTE(review): the x2 direction uses the module-level gradientFunc2, not a
    parameter — keep the sibling definition in sync.

    BUG FIXES vs. the original:
      * gradientFunc2 was called with swapped arguments (newX2, newX1).
      * the loop assigned newResult1 but compared the stale newResult, so
        reResult was always 0 after `result = newResult` and the loop exited
        after a single iteration.
      * both partials are now evaluated at the same (old) point instead of
        computing the x2 step from the already-updated x1.
    """
    result = targetFunc(initX1, initX2)  # objective value at the starting point
    listx.append((initX1, initX2))
    # First descent step.
    newX1 = initX1 - rating * gradientFunc(initX1, initX2)
    newX2 = initX2 - rating * gradientFunc2(initX1, initX2)
    newResult = targetFunc(newX1, newX2)
    reResult = np.abs(result - newResult)

    t = 0
    while reResult > tolent and t < times:
        t += 1
        initX1, initX2 = newX1, newX2
        result = newResult
        listx.append((initX1, initX2))
        # Evaluate both partials at the current point, then step.
        gradX1 = gradientFunc(newX1, newX2)
        gradX2 = gradientFunc2(newX1, newX2)
        newX1 = newX1 - rating * gradX1
        newX2 = newX2 - rating * gradX2
        newResult = targetFunc(newX1, newX2)  # was misassigned to newResult1
        reResult = np.abs(result - newResult)
    return initX1, initX2
if __name__ == '__main__':
    # Run gradient descent from the starting guess (10, 10).
    # BUG FIX: the original passed gradientFunc2 here; the fourth argument is
    # the x1-partial, so it must be gradientFunc (gradientCal picks up
    # gradientFunc2 internally for the x2 direction).
    print(gradientCal(10, 10, targetFunc, gradientFunc))

    # 1000x1000 grid over [-50, 50] x [-50, 50] for the surface plot.
    xs, ys = np.meshgrid(np.linspace(-50, 50, 1000), np.linspace(-50, 50, 1000))
    print(xs)
    print(xs.shape)
    print(ys.shape)

    # Vectorized evaluation of the objective over the whole grid.
    z = targetFunc(xs, ys)
    print(z)

    # Same evaluation element-by-element (demonstration only — the vectorized
    # form above produces the identical array far faster).
    result = [[targetFunc(x1, x2) for x1, x2 in zip(row1, row2)]
              for row1, row2 in zip(xs, ys)]
    print(np.array(result))

    # 3D surface plot of the objective.
    ax = plt.figure().add_subplot(111, projection='3d')
    ax.plot_surface(xs, ys, z)
    plt.show()
# Screenshot of the run output (from the original blog post).