啥也别说了,最优化太心酸了,可能是每个计算机专业人的噩梦(主要是数学渣)。直接上代码。
# Forward (bracketing) search for a line-search interval.
def find_interval(f, step=1, coefficient=2):
    """Find an interval [left, right] that brackets a minimizer.

    Advances the right endpoint by geometrically growing steps until the
    directional derivative turns non-negative.  Assumes f(0) < 0, i.e. the
    search direction is a descent direction.

    Parameters
    ----------
    f : callable
        Derivative of the objective along the line-search direction.
    step : number, optional
        Initial step size (default 1, as in the original hard-coded version).
    coefficient : number, optional
        Geometric growth factor for the step, must be > 1 (default 2).

    Returns
    -------
    tuple
        (left, right) with f(left) < 0 and f(right) >= 0.

    Note: loops forever if f never becomes non-negative (objective unbounded
    below along the direction).
    """
    left = 0
    right = left + step
    # Double (by `coefficient`) the step until the sign of the derivative
    # flips, which brackets a 1-D minimizer between left and right.
    while f(right) < 0:
        left = right
        step = step * coefficient
        right = left + step
    return left, right
# Exact line search by bisection on the directional derivative.
def find_root(f, left, right, limit=1e-6):
    """Bisect [left, right] until |f(mid)| < limit; return mid.

    Parameters
    ----------
    f : callable
        Derivative of the objective along the line-search direction;
        must satisfy f(left) < 0 <= f(right).
    left, right : number
        Endpoints of the bracketing interval.
    limit : float, optional
        Tolerance on |f(mid)| (default 1e-6, as in the original).

    Returns
    -------
    float
        Approximate root of f, i.e. an approximate 1-D minimizer.
    """
    mid = (left + right) / 2
    value = f(mid)
    # Evaluate f once per iteration (the original called f(mid) up to
    # three times: in the while-condition, the if, and the elif).
    while abs(value) >= limit:
        if value < 0:
            left = mid
        else:
            right = mid
        mid = (left + right) / 2
        value = f(mid)
    return mid
# Steepest descent with exact line search.
def grad_min(g, x):
    """Minimize a function given its gradient, via steepest descent.

    Parameters
    ----------
    g : callable
        Gradient of the objective; maps a numpy array to a numpy array.
    x : numpy.ndarray
        Starting point.

    Returns
    -------
    numpy.ndarray
        Approximate minimizer where norm(g(x)) <= 1e-5.
    """
    import numpy
    grad = g(x)  # evaluate the gradient once per iteration
    while numpy.linalg.norm(grad) > 1e-5:
        # Normalized steepest-descent direction.  (The original also had a
        # dead `d = -g(x)` line immediately overwritten by this one.)
        d = -grad / numpy.linalg.norm(grad)
        # h(y): derivative of the objective along d at step length y.
        h = lambda y: numpy.dot(g(x + y * d), d)
        # Bracket once, then bisect (the original ran find_interval twice,
        # once per endpoint).
        left, right = find_interval(h)
        x = x + find_root(h, left, right) * d
        grad = g(x)
    return x
import numpy
from sympy import lambdify, symbols, Matrix, MatrixSymbol, diff

# Build the symbolic quadratic f(x) = x^T G x / 2 + b^T x + c in 2 variables.
G = MatrixSymbol('G', 2, 2)
b = MatrixSymbol('b', 2, 1)
c = MatrixSymbol('c', 1, 1)
ps = [c, b, G]
xs = symbols('x1:3')
x = Matrix(xs)
quad = (x.T * G * x / 2 + b.T * x + c)[0, 0]

# Substitute concrete parameter values.  NOTE: the values list is consumed
# with list.pop(), i.e. taken from the END, so it is given in reverse order
# of `ps`: [G, b, c].
expr = lambda y: quad.subs([(p, Matrix(y.pop())) for p in ps]).simplify()
fexpr = expr([[[21, 4], [4, 1]], [2, 3], [10]])

# Fix: use a distinct loop variable `xi` so the symbolic vector `x` is not
# clobbered by the comprehension (the original reused `x` here).
gexpr = [diff(fexpr, xi) for xi in xs]
glambdify = [lambdify([xs], gi, "numpy") for gi in gexpr]
g = lambda y: numpy.array([gf(y) for gf in glambdify])  # numeric gradient

x0 = numpy.array([1, 1])
# Print the result so the script shows output when run as a file
# (previously the return value was silently discarded outside a REPL).
# Expected: approximately array([ 1.99999231, -10.9999606 ])
print(grad_min(g, x0))