Implementing the CMA-ES Optimization Algorithm in Python

Test objective function

Use the CMA-ES method to find the minimum of the function

f(x_1, x_2) = 10(x_1 + x_2 - 5)^2 + (x_1 - x_2)^2
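
Since both terms are squares, the minimum value is 0, attained where both vanish. Setting the partial derivatives to zero makes this explicit:

∂f/∂x_1 = 20(x_1 + x_2 - 5) + 2(x_1 - x_2) = 0
∂f/∂x_2 = 20(x_1 + x_2 - 5) - 2(x_1 - x_2) = 0

Adding the two equations gives x_1 + x_2 = 5; subtracting them gives x_1 = x_2. The unique minimizer is therefore x_1 = x_2 = 2.5, with f(2.5, 2.5) = 0.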

Code

```python
from cmaes import CMA
import numpy as np
import math


def func(x1, x2):
    """Objective: minimum value 0 at x1 = x2 = 2.5."""
    return 10 * math.pow(x1 + x2 - 5, 2) + math.pow(x1 - x2, 2)


if __name__ == '__main__':
    op1 = None
    op2 = None

    # The search box is [2, 3] x [2, 3]. Note that the initial mean (the
    # origin) lies outside the box, so early samples are repaired onto the
    # boundary, which is why the first generations report x1 = x2 = 2.0.
    # (Newer versions of the library may reject an out-of-bounds mean outright.)
    optimizer = CMA(mean=np.zeros(2), sigma=1.5, bounds=np.array([[2, 3], [2, 3]]))
    for generation in range(50):
        solutions = []
        for _ in range(optimizer.population_size):
            x = optimizer.ask()
            value = func(x[0], x[1])
            solutions.append((x, value))
            print(f'#{generation}: {value}, x1={x[0]}, x2={x[1]}')
            if value < 1e-4:
                op1 = x[0]
                op2 = x[1]
                break
        if op1 is not None:  # a good-enough solution was found; stop early
            break
        optimizer.tell(solutions)

    print(f'op1 = {op1}, op2 = {op2}')
```
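
One quirk of the loop above is that breaking out as soon as a single sample beats the 1e-4 threshold skips the final `tell()` call and bypasses the library's own convergence machinery. A minimal alternative sketch (reusing `func` from above) lets every generation complete and stops on the `should_stop()` criterion that the cmaes package provides; the in-bounds starting mean (2.5, 2.5) and sigma 0.5 here are illustrative choices, not the original post's settings:

```python
optimizer = CMA(mean=np.full(2, 2.5), sigma=0.5,
                bounds=np.array([[2, 3], [2, 3]]))
best_x, best_val = None, float('inf')
for generation in range(200):
    solutions = []
    for _ in range(optimizer.population_size):
        x = optimizer.ask()
        value = func(x[0], x[1])
        solutions.append((x, value))
        if value < best_val:       # track the best sample seen so far
            best_x, best_val = x, value
    optimizer.tell(solutions)      # always feed back the full population
    if optimizer.should_stop():    # library's internal convergence test
        break
print(f'best: {best_x}, f = {best_val}')
```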

Result

```
#0: 10.0, x1=2.0, x2=2.0
#0: 10.0, x1=2.0, x2=2.0
#0: 6.534076900378765, x1=2.193994857609466, x2=2.0
#0: 10.0, x1=2.0, x2=2.0
#0: 0.06335566347967159, x1=2.376519710481994, x2=2.574212375227286
#0: 0.3442059362928029, x1=2.360081782467732, x2=2.456935919932541
#1: 3.015856361818722, x1=2.5481285065342933, x2=2.9835016808084815
#1: 1.0402535327830014, x1=2.764233382822427, x2=2.016437120209082
#1: 0.15058582430743594, x1=2.701000460480677, x2=2.3538168242217394
#1: 0.7868521946585273, x1=2.4251776547005615, x2=2.2972461013043586
#1: 0.9262149571480615, x1=2.910581469401808, x2=2.3328102727474884
#1: 1.9883736927416178, x1=2.134014887218789, x2=2.4300078349765917
#2: 0.82022805460328, x1=2.9748246921767434, x2=2.105637222300638
#2: 0.08856697756923511, x1=2.474107423495605, x2=2.609671643067113
#2: 0.2646313083431093, x1=2.7472189172803745, x2=2.2356545315164955
#2: 0.2187844565601368, x1=2.2564488730243575, x2=2.682520361709355
#2: 0.10165301794553384, x1=2.6009975773248213, x2=2.3407554767868857
#2: 1.969605825329867, x1=2.2703902188512872, x2=2.2858342401896863
#3: 0.8353676431824137, x1=2.9636814187607285, x2=2.1903952147038095
#3: 0.05753740324804603, x1=2.619222760925894, x2=2.379393142223122
#3: 0.44682577699652737, x1=2.6743916001686836, x2=2.5321496823414624
#3: 0.2498444545897333, x1=2.7620093589880907, x2=2.2900195613480694
#3: 3.1188697293697363, x1=2.705789911831616, x2=2.8507928648499568
#3: 0.8525997211104847, x1=2.5675832587399805, x2=2.1690251744679365
#4: 0.4959335838977355, x1=2.8539594975415983, x2=2.1498362806416202
#4: 1.6442749822921043, x1=2.6025739442057505, x2=2.7981771468004744
#4: 1.9615450460338173, x1=2.1556984027146813, x2=2.4086941845368677
#4: 1.6016747715075708, x1=2.3811646294272175, x2=2.9726465987053103
#4: 1.8114068090424973, x1=2.5955382415601433, x2=2.0197677105676406
#4: 0.6414232216808395, x1=2.3007703949301175, x2=2.875584815741031
#5: 1.4800459968433166, x1=2.1467559799992815, x2=2.483567990715536
#5: 1.2720906127185203, x1=2.397430091053399, x2=2.918833840332442
#5: 3.6417601887471154, x1=2.883197235310154, x2=2.7180076028331555
#5: 4.108580048540381, x1=2.2281967591176906, x2=2.131550205572963
#5: 4.079206830993574, x1=2.876233518038383, x2=2.761420367023698
#5: 0.9356685819334423, x1=2.696627918066794, x2=2.07027362918555
#6: 1.081497314368821, x1=2.333854985524522, x2=2.337285539065201
#6: 3.3037064127220477, x1=2.060426247004182, x2=2.3733788126831183
#6: 2.465420725941423, x1=2.8844307942372702, x2=2.604123018217866
#6: 1.956939271972105, x1=2.582260296752602, x2=2.851822147121573
#6: 3.289756464683478, x1=2.2500694280671008, x2=2.1768343248383513
#6: 0.1308506764954115, x1=2.686469417393113, x2=2.3674596538684565
#7: 2.3250950827961456, x1=2.8481412529242633, x2=2.629047714269651
#7: 0.20693568350350128, x1=2.519395579917585, x2=2.3474269205893177
#7: 0.2644805637415131, x1=2.7685055583559612, x2=2.2948240482393216
#7: 0.7430600495400986, x1=2.7899468723400718, x2=2.4621684494989386
#7: 1.920359663261277, x1=2.4457077548694164, x2=2.127763128010452
#7: 0.7418704950447988, x1=2.8922703315483673, x2=2.3077941925356296
#8: 2.587054895063614, x1=2.4000435996417044, x2=2.100239255344807
#8: 0.316319604172951, x1=2.773642531031088, x2=2.2128450968600157
#8: 0.5261465682178328, x1=2.876563502910547, x2=2.160992243442791
#8: 0.5313395354165122, x1=2.426736841047016, x2=2.3442362631391136
#8: 0.9541331224781457, x1=2.2483977046117376, x2=2.962395946423536
#8: 0.33559048969298005, x1=2.6474113347794463, x2=2.5321150583600596
#9: 0.7338641238267016, x1=2.8880557629680355, x2=2.3126305451029485
#9: 0.14746128690044774, x1=2.5533005068486885, x2=2.344737386580146
#9: 0.07946973210124161, x1=2.6407003613704756, x2=2.410962037632292
#9: 0.4426816268713762, x1=2.840038147565921, x2=2.176838862022909
#9: 0.1726432543724588, x1=2.715114572325701, x2=2.3439360476428943
#9: 0.39530195764123677, x1=2.7963469883981125, x2=2.174440188748402
#10: 0.4337099260686925, x1=2.718704843812967, x2=2.167384668999698
#10: 0.2381483820789981, x1=2.4414637486486344, x2=2.6911313084474386
#10: 0.011747625524323773, x1=2.444048479778901, x2=2.5400282192719614
#10: 0.05724842388558135, x1=2.535045164920554, x2=2.4020562156524985
#10: 0.04947550567446757, x1=2.596232746954514, x2=2.383364629775309
#10: 1.349065962168933, x1=2.8541714592737963, x2=2.4951363972681886
#11: 0.4386921220137742, x1=2.505823891521909, x2=2.2955605389524325
#11: 0.3990949509865823, x1=2.6885071121870605, x2=2.1891327023728615
#11: 0.11152710803387075, x1=2.668121083131221, x2=2.334248408076757
#11: 1.7127175226612674, x1=2.378096333474768, x2=2.211423731988234
#11: 0.07891279388053785, x1=2.4252514273245227, x2=2.6341430777080133
#11: 0.8631980811798495, x1=2.3063518677567894, x2=2.4013868743414313
#12: 0.1215832197249647, x1=2.512578303013577, x2=2.3848243538449965
#12: 0.01794980192778644, x1=2.5246720630176442, x2=2.441990843682251
#12: 0.17728425739526243, x1=2.624997926533472, x2=2.5023790182947008
#12: 0.1900709965888348, x1=2.2891379041340607, x2=2.62172267208869
#12: 0.1317466161568503, x1=2.471402124079702, x2=2.4152011952982644
#12: 0.014299069431282084, x1=2.549892969025236, x2=2.4373380689740647
#13: 0.07494688829501794, x1=2.4493154526845657, x2=2.46424144284036
#13: 0.008608671935873103, x1=2.5180198466045662, x2=2.511242307974324
#13: 0.002101537602758445, x1=2.477621338445377, x2=2.523358939536232
#13: 0.1253744015014866, x1=2.3587801970399287, x2=2.546225838394624
#13: 0.0007931066586420569, x1=2.5110329800069615, x2=2.496617517325351
#13: 0.29681430186201274, x1=2.4431137228600477, x2=2.385567076422179
#14: 0.33493948130010487, x1=2.283242238566705, x2=2.7994831140964327
#14: 0.02402833436232993, x1=2.4323615654892414, x2=2.529417356937374
#14: 0.23413212248448917, x1=2.528319030301766, x2=2.621811349293453
#14: 0.02447496459989463, x1=2.4317273682917753, x2=2.5297037380696374
#14: 0.13017017904955874, x1=2.3598461705714646, x2=2.6877470785357143
#14: 0.05705493406043433, x1=2.3748849334704367, x2=2.5989014832270616
#15: 0.04385090518244558, x1=2.4405874151705977, x2=2.495510846278926
#15: 0.17091186040217496, x1=2.3492668070956872, x2=2.533736230442824
#15: 0.09020731121953468, x1=2.470224789515952, x2=2.435436927749388
#15: 0.09874529851251285, x1=2.5392974206077943, x2=2.5598602658852894
#15: 0.0033412487589541935, x1=2.528141192392836, x2=2.4704980480254317
#15: 0.07381010683748489, x1=2.462445276927733, x2=2.451709024449558
#16: 0.05592153140154273, x1=2.507882496653863, x2=2.4223942688356495
#16: 0.10069156156133502, x1=2.6588371793206895, x2=2.3985647646606485
#16: 0.031063183448908942, x1=2.5042649472075587, x2=2.4434264830133
#16: 0.03613708392853141, x1=2.4556975106561465, x2=2.4849019996566897
#16: 0.0790691499218851, x1=2.477385194807159, x2=2.434723329404649
#16: 0.07572630321009807, x1=2.3742830739836864, x2=2.643596265814285
#17: 0.009656256558257175, x1=2.5492959208875385, x2=2.4682986996050005
#17: 0.008333720585859416, x1=2.4522280277320054, x2=2.5408687032047372
#17: 0.17150234300637354, x1=2.531074323252401, x2=2.350995980166239
#17: 0.03986776688999875, x1=2.42784562832178, x2=2.5154089471115464
#17: 0.13227464607111497, x1=2.416966053002254, x2=2.4692163045681035
#17: 0.014102304778798964, x1=2.5621353916768905, x2=2.446768647876299
#18: 0.002864832196328253, x1=2.4945310516390466, x2=2.488645763238135
#18: 0.054214905619042957, x1=2.3831692459143734, x2=2.6159951982555842
#18: 0.04938307278665337, x1=2.517675519616028, x2=2.419310652048395
#18: 0.11616479668415054, x1=2.332313724061048, x2=2.6727647329174298
#18: 0.003986587304035755, x1=2.479224971826557, x2=2.5318213378820733
#18: 0.09173386149717747, x1=2.624620979915844, x2=2.3414208702653814
#19: 0.018949964846792164, x1=2.5376321428963684, x2=2.5046299181111316
#19: 0.00728201661353757, x1=2.4735456891778096, x2=2.5008922660115456
#19: 0.02255325440617661, x1=2.45951045580425, x2=2.494290399246603
#19: 0.001963377035031722, x1=2.5109522370015496, x2=2.479256233893981
#19: 0.014403549473427438, x1=2.4384309556296344, x2=2.5428730721649857
#19: 0.005897052874434157, x1=2.5149148482676753, x2=2.4662897892467415
#20: 0.004067769251672983, x1=2.4700060880093044, x2=2.533049132870812
#20: 0.008440670409775591, x1=2.456108518473503, x2=2.5473342888751587
#20: 0.02639164343876337, x1=2.533384701445267, x2=2.4276208691792767
#20: 4.682774624472397e-05, x1=2.5025877962626555, x2=2.4964532765298557
op1 = 2.5025877962626555, op2 = 2.4964532765298557
```
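
The optimizer settles at x1 ≈ x2 ≈ 2.5, matching the analytic minimum derived above: the run stops in generation #20, when a sample's objective value (about 4.7e-05) first drops below the 1e-4 threshold.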
A from-scratch implementation

CMA-ES (Covariance Matrix Adaptation Evolution Strategy) is an optimization algorithm mainly used for continuous optimization problems. The basic steps of a Python implementation are:

1. Install the required libraries: numpy, scipy.
2. Define the objective function to be optimized.
3. Initialize the CMA-ES parameters: population size, initial distribution, step size, convergence threshold, and so on.
4. Update the population according to the CMA-ES update equations and evaluate each sample's fitness.
5. Check whether the convergence condition is met; if so, output the best solution, otherwise repeat step 4.

Here is a simple example implementation:

```python
import numpy as np
from scipy.optimize import minimize


def cma_es(func, x0, sigma, popsize, maxiter):
    # --- strategy parameter initialization ---
    n = len(x0)                        # problem dimension
    x_mean = np.asarray(x0, dtype=float)
    mu = popsize // 2                  # number of selected parents
    weights = np.log(mu + 0.5) - np.log(np.arange(1, mu + 1))
    weights /= np.sum(weights)         # positive, normalized recombination weights
    mueff = np.sum(weights) ** 2 / np.sum(weights ** 2)

    # time constants and learning rates for the adaptation mechanisms
    cc = (4 + mueff / n) / (n + 4 + 2 * mueff / n)
    cs = (mueff + 2) / (n + mueff + 5)
    c1 = 2 / ((n + 1.3) ** 2 + mueff)
    cmu = min(1 - c1, 2 * (mueff - 2 + 1 / mueff) / ((n + 2) ** 2 + mueff))
    damps = 1 + 2 * max(0, np.sqrt((mueff - 1) / (n + 1)) - 1) + cs

    # evolution paths and covariance machinery
    pc = np.zeros(n)
    ps = np.zeros(n)
    B = np.eye(n)                      # eigenvectors of C
    D = np.ones(n)                     # square roots of eigenvalues of C
    C = np.eye(n)
    invsqrtC = np.eye(n)               # C^(-1/2)
    eigeneval = 0
    chiN = np.sqrt(n) * (1 - 1 / (4 * n) + 1 / (21 * n ** 2))

    for i in range(maxiter):
        # --- sample a new population ---
        arz = np.random.randn(n, popsize)
        arx = x_mean.reshape(-1, 1) + sigma * (B @ (D[:, None] * arz))
        arfitness = np.array([func(arx[:, j]) for j in range(popsize)])

        # --- recombination: move the mean toward the best mu samples ---
        parent_indices = np.argsort(arfitness)[:mu]
        x_old = x_mean
        x_mean = arx[:, parent_indices] @ weights

        # --- update the evolution paths ---
        ps = (1 - cs) * ps + np.sqrt(cs * (2 - cs) * mueff) * (invsqrtC @ (x_mean - x_old)) / sigma
        hsig = (np.linalg.norm(ps) / np.sqrt(1 - (1 - cs) ** (2 * (i + 1)))
                < (1.4 + 2 / (n + 1)) * chiN)
        pc = (1 - cc) * pc + hsig * np.sqrt(cc * (2 - cc) * mueff) * (x_mean - x_old) / sigma

        # --- rank-1 and rank-mu covariance matrix update ---
        artmp = (arx[:, parent_indices] - x_old.reshape(-1, 1)) / sigma
        C = ((1 - c1 - cmu) * C
             + c1 * (np.outer(pc, pc) + (1 - hsig) * cc * (2 - cc) * C)
             + cmu * artmp @ np.diag(weights) @ artmp.T)

        # --- lazily refresh the eigendecomposition of C ---
        eigeneval += popsize
        if eigeneval > popsize / (c1 + cmu) / n / 10:
            eigeneval = 0
            C = np.triu(C) + np.triu(C, 1).T        # enforce symmetry
            Dsq, B = np.linalg.eigh(C)
            D = np.sqrt(np.maximum(Dsq, 1e-20))
            invsqrtC = B @ np.diag(1 / D) @ B.T

        # --- step-size adaptation ---
        sigma *= np.exp((cs / damps) * (np.linalg.norm(ps) / chiN - 1))

        # --- convergence test ---
        if arfitness[parent_indices[0]] < 1e-10:
            break

    return x_mean


# quick test
def f(x):
    return np.sum(x ** 2)


x0 = np.array([1.0, 2.0, 3.0])
result = cma_es(f, x0, sigma=0.1, popsize=10, maxiter=100)
print(result)
print(minimize(f, x0).x)   # cross-check against scipy's local optimizer
```

Note that in this code the objective function takes a numpy array rather than a scalar. The scipy minimize call at the end is only there to cross-check the final result.
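
As a quick consistency check, the same routine can be pointed at the two-variable objective from the top of the post by wrapping it to take a single array argument. This usage sketch is illustrative and not part of the original post:

```python
def f2(x):
    # array-argument wrapper for f(x1, x2) = 10*(x1 + x2 - 5)**2 + (x1 - x2)**2
    return 10 * (x[0] + x[1] - 5) ** 2 + (x[0] - x[1]) ** 2

x_best = cma_es(f2, np.array([2.0, 2.0]), sigma=0.5, popsize=12, maxiter=200)
print(x_best)  # expected to approach (2.5, 2.5)
```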
