# PRP共轭梯度法对于正定的二次函数而言和FR共轭梯度法效果是近似的,而对于一般的函数来说的话,PRP算法一般优于FR算法
# (For positive-definite quadratics PRP and FR conjugate gradient behave similarly;
#  on general functions PRP usually outperforms FR.)
from 实用优化算法.helper import* # 单独博客介绍
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
# Symbolic variables: x1, x2 are the coordinates of the objective,
# a is the step-length symbol used by the line-search helpers.
x1,x2,a = symbols('x1,x2,a')
# NOTE(review): this module-level k is shadowed by the local k inside do_work
# and appears to be unused — confirm before removing.
k = 0
def get_f():
    """Return the objective function as a SymPy expression in x1, x2.

    Currently the Rosenbrock function; expressions from earlier
    experiments are kept below for easy switching.
    """
    # f = pow(1 - x1,2) + 2*pow(x2 - pow(x1,2),2)
    # f = (3/2) * pow(x1,2) + pow(x2,2)/2 - x1*x2 - 2*x1
    return 100 * (x2 - x1 ** 2) ** 2 + (1 - x1) ** 2  # Rosenbrock
def do_work(x, max_iter=10000):
    """Minimize get_f() with the PRP conjugate gradient method.

    Parameters
    ----------
    x : np.ndarray, shape (2, 1)
        Starting point as a column vector.
    max_iter : int, optional
        Safety cap on iterations so a non-converging run cannot loop
        forever (the original loop was unbounded). Default 10000.

    Returns
    -------
    (steps_x1, steps_x2)
        Lists of the x1 / x2 coordinates of every iterate, including
        the starting point — used by the caller for plotting the path.
    """
    g = get_grad(x, get_f())   # gradient at the previous iterate
    g1 = g                     # gradient at the current iterate
    d = -g                     # first direction = steepest descent
    k = 0                      # iteration counter passed to the beta helper
    cnt = 0
    steps_x1 = [x[0][0]]
    steps_x2 = [x[1][0]]
    # Stop when the gradient norm is small enough, or when the safety
    # cap is reached (robustness fix: the loop used to be unbounded).
    while get_len_grad(g1) >= 0.00001 and cnt < max_iter:
        bt = get_biet_PRP(k, g, g1)      # PRP beta coefficient
        d = -g1 + bt * d                 # conjugate direction update
        step = non_accuracy_search(x, d, get_f())  # Armijo inexact line search
        # step = golden_search(0,2,x,d,get_f())  # golden-section exact search alternative
        next_x = x + step * d
        steps_x1.append(next_x[0][0])
        steps_x2.append(next_x[1][0])
        k += 1
        # Shift gradients: current becomes previous, then re-evaluate.
        g = g1
        g1 = get_grad(next_x, get_f())
        x = next_x
        cnt += 1
        print(cnt,' ',x,',',end = '\n\n')
    print('\n','最终结果x*:',x,'\n','f(x*):',get_f().subs(x1,x[0][0]).subs(x2,x[1][0]))
    return steps_x1,steps_x2
if __name__ == '__main__':
    # Run PRP conjugate gradient from the origin.
    x0 = np.array([[0], [0]])
    step_x1, step_x2 = do_work(x0)

    # Grid over which the objective is visualized.
    x = np.arange(-3, 3, 0.1)
    y = np.arange(-3, 3, 0.1)
    X1, X2 = np.meshgrid(x, y)
    # BUG FIX: the surface/contour previously plotted a leftover function
    # (X1**4/4 + X2**2/2 - X1*X2 + X1 - X2) that did not match get_f(),
    # so the optimization path was drawn on the wrong landscape.
    # Keep this expression in sync with the Rosenbrock in get_f().
    Z = 100 * (X2 - X1 ** 2) ** 2 + (1 - X1) ** 2

    step_x1 = np.array(step_x1)
    step_x2 = np.array(step_x2)
    # Objective value at each iterate, for the 3D path.
    step_x3 = 100 * (step_x2 - step_x1 ** 2) ** 2 + (1 - step_x1) ** 2

    # 3D surface with the optimization path.
    fig = plt.figure()
    # Axes3D(fig) stopped auto-attaching the axes in matplotlib >= 3.7;
    # add_subplot(projection='3d') is the supported construction.
    ax = fig.add_subplot(projection='3d')
    ax.plot_surface(X1, X2, Z, rstride=1, cstride=1)
    ax.plot(step_x1, step_x2, step_x3, c='r', marker='*')

    # 2D contour plot with the same path.
    plt.figure(figsize=(10, 6))
    bar = plt.contourf(X1, X2, Z, 5, cmap=plt.cm.Blues)
    plt.plot(step_x1, step_x2, marker='*')
    plt.colorbar(bar)
    plt.show()