# SciPy SLSQP constrained-optimization examples (program output appended below).
import numpy as np
from scipy.optimize import minimize
# optimize 1
# f(x) = 2xy + 2x - x^2 - 2y^2; x^3 - y = 0; y - 1 >=0
def func(x, sign=1.0):
    """Objective for problem 1: f(x) = 2*x0*x1 + 2*x0 - x0^2 - 2*x1^2.

    Args:
        x: sequence of two floats (x0, x1).
        sign: scale factor; pass -1.0 to maximize f with a minimizer.

    Returns:
        sign * f(x) as a float.
    """
    # Indentation restored (the pasted original had the body at column 0).
    return sign * (2 * x[0] * x[1] + 2 * x[0] - x[0] ** 2 - 2 * x[1] ** 2)
def func_deriv(x, sign=1.0):
    """Analytic gradient of `func`, scaled by the same `sign`.

    Returns:
        np.array([df/dx0, df/dx1]) evaluated at x.
    """
    # Partial derivatives of 2*x0*x1 + 2*x0 - x0^2 - 2*x1^2.
    dfdx0 = sign * (-2 * x[0] + 2 * x[1] + 2)
    dfdx1 = sign * (-4 * x[1] + 2 * x[0])
    return np.array([dfdx0, dfdx1])
# Problem 1 constraints: x0^3 - x1 == 0 (equality) and x1 - 1 >= 0 (inequality).
cons = (
    {'type': 'eq',
     'fun': lambda x: np.array([x[0] ** 3 - x[1]]),
     'jac': lambda x: np.array([3 * x[0] ** 2, -1])},
    {'type': 'ineq',
     'fun': lambda x: np.array([x[1] - 1]),
     'jac': lambda x: np.array([0, 1])},
)

# Unconstrained maximization: sign=-1.0 turns the minimizer into a maximizer.
res = minimize(func, [-1.0, 1.0], args=(-1.0,), jac=func_deriv,
               method='SLSQP', options={'disp': True})
print(res)

# Same maximization with both constraints applied.
res2 = minimize(func, [-1.0, 1.0], args=(-1.0,), jac=func_deriv,
                constraints=cons, method='SLSQP', options={'disp': True})
print(res2)
# # optimize2:
# f(x)=(x1-2)^2 + (x2-1)^2; x1+x2+5=0;
def f(x):
    """Objective for problem 2: f(x) = (x0 - 2)^2 + (x1 - 1)^2.

    Args:
        x: sequence of two floats (x0, x1).

    Returns:
        Squared distance from the point (2, 1).
    """
    # Indentation restored (the pasted original had the body at column 0).
    return (x[0] - 2) ** 2 + (x[1] - 1) ** 2
def f_deriv(x):
    """Analytic gradient of `f`.

    Returns:
        np.array([2*(x0 - 2), 2*(x1 - 1)]).
    """
    return np.array([2 * (x[0] - 2), 2 * (x[1] - 1)])
# NOTE: adding a `sign` argument to the constraints prevented convergence (cause
# unknown); equality-constrained runs don't need the sign, inequality ones do.
# cons2 = ({'type':'eq',
# 'fun':lambda x: np.array([x[0] + x[1] + 5]),
# 'jac': lambda x: np.array([1.0, 1.0])})
# res3 = minimize(f, [-10.0, 10.0], args=(-1.0,), jac=f_deriv,
# method='SLSQP', options={'disp':True})
# print(">>"*10, "\n", res3)
# Problem 2 with the equality constraint x0 + x1 + 5 == 0.
cons2 = {'type': 'eq',
         'fun': lambda x: np.array([x[0] + x[1] + 5])}
# BUG FIX: cons2 was defined but never passed to minimize, so the run that was
# meant to be constrained actually solved the unconstrained problem (its output
# below converges to the unconstrained optimum x = [2, 1]).
res3 = minimize(f, [-10.0, 10.0], jac=f_deriv, constraints=cons2,
                method='SLSQP', options={'disp': True})
print(">>" * 10, "\n", res3)
# optimize3, optimization with equal constraint
#f(x) = x1^2*x2; x1^2 +x2^2 - 1= 0;
def f2(x):
    """Objective for problem 3: f(x) = x0^2 * x1.

    Args:
        x: sequence of two floats (x0, x1).

    Returns:
        x0 squared times x1.
    """
    # Indentation restored (the pasted original had the body at column 0).
    return x[0] ** 2 * x[1]
def f2_der(x):
    """Analytic gradient of `f2`.

    Returns:
        np.array([2*x0*x1, x0^2]).
    """
    return np.array([2 * x[0] * x[1], x[0] ** 2])
# Problem 3: minimize x0^2 * x1 subject to the unit-circle constraint
# x0^2 + x1^2 == 1. (The extra parentheses around the dict were redundant.)
cons3 = {'type': 'eq',
         'fun': lambda x: np.array([x[0] ** 2 + x[1] ** 2 - 1])}
res4 = minimize(f2, [-1.0, 1.0], jac=f2_der, constraints=cons3,
                method='SLSQP', options={'disp': True})
print(">>" * 10, "output: \n", res4)
# Inequality constraints carry a sign and require a derivative (jac) in the constraint.
Results (program output from the script above):
Optimization terminated successfully (Exit mode 0)
Current function value: -2.0
Iterations: 4
Function evaluations: 5
Gradient evaluations: 4
fun: -2.0
jac: array([-0., -0.])
message: 'Optimization terminated successfully'
nfev: 5
nit: 4
njev: 4
status: 0
success: True
x: array([2., 1.])
Optimization terminated successfully (Exit mode 0)
Current function value: -1.0000001831052137
Iterations: 9
Function evaluations: 14
Gradient evaluations: 9
fun: -1.0000001831052137
jac: array([-1.99999982, 1.99999982])
message: 'Optimization terminated successfully'
nfev: 14
nit: 9
njev: 9
status: 0
success: True
x: array([1.00000009, 1. ])
Optimization terminated successfully (Exit mode 0)
Current function value: 0.0
Iterations: 2
Function evaluations: 3
Gradient evaluations: 2
fun: 0.0
jac: array([0., 0.])
message: 'Optimization terminated successfully'
nfev: 3
nit: 2
njev: 2
status: 0
success: True
x: array([2., 1.])
Optimization terminated successfully (Exit mode 0)
Current function value: 9.866059393255199e-15
Iterations: 7
Function evaluations: 10
Gradient evaluations: 7
output:
fun: 9.866059393255199e-15
jac: array([1.21298674e-04, 3.67794679e-09])
message: 'Optimization terminated successfully'
nfev: 10
nit: 7
njev: 7
status: 0
success: True
x: array([-9.93280391e-08, 1.00000000e+00])
Process finished with exit code 0