简单遗传算法 — Python 实现（Simple Genetic Algorithm in Python）

import copy
import random

import matplotlib.pyplot as plt
import numpy as np

from GAIndividual import GAIndividual

7

class GeneticAlgorithm:
    """Real-coded simple genetic algorithm.

    Evolves a population of ``GAIndividual`` objects with roulette-wheel
    selection, arithmetic crossover and non-uniform mutation.

    NOTE(review): the trace stores ``(1 - f) / f`` of the fitness ``f``,
    which assumes the fitness transform maps the objective into (0, 1]
    (e.g. f = 1 / (1 + objective)) — confirm against GAIndividual.
    """

    def __init__(self, sizepop, vardim, bound, MAXGEN, params):
        """
        sizepop: population size
        vardim: dimension of the decision variables
        bound: 2 x vardim array; row 0 = lower bounds, row 1 = upper bounds
        MAXGEN: number of generations (termination condition)
        params: [crossover rate, mutation rate, alpha] where alpha is the
                arithmetic-crossover blending coefficient
        """
        self.sizepop = sizepop
        self.MAXGEN = MAXGEN
        self.vardim = vardim
        self.bound = bound
        self.population = []
        self.fitness = np.zeros((self.sizepop, 1))
        # trace[t] = (best objective proxy, mean objective proxy) per generation
        self.trace = np.zeros((self.MAXGEN, 2))
        self.params = params

    def initialize(self):
        """Create the initial random population."""
        for _ in range(self.sizepop):
            ind = GAIndividual(self.vardim, self.bound)
            ind.generate()
            self.population.append(ind)

    def evaluate(self):
        """Evaluate every individual and cache its fitness in self.fitness."""
        for i in range(self.sizepop):
            self.population[i].calculateFitness()
            self.fitness[i] = self.population[i].fitness

    def solve(self):
        """Run the evolutionary loop for MAXGEN generations."""
        self.t = 0
        self.initialize()
        self.evaluate()
        bestIndex = np.argmax(self.fitness)
        self.best = copy.deepcopy(self.population[bestIndex])
        self.avefitness = np.mean(self.fitness)
        self.trace[self.t, 0] = (1 - self.best.fitness) / self.best.fitness
        self.trace[self.t, 1] = (1 - self.avefitness) / self.avefitness
        print("Generation %d: optimal function value is: %f; average function value is %f" % (
            self.t, self.trace[self.t, 0], self.trace[self.t, 1]))
        while self.t < self.MAXGEN - 1:
            self.t += 1
            self.selectionOperation()
            self.crossoverOperation()
            self.mutationOperation()
            self.evaluate()
            best = np.max(self.fitness)
            bestIndex = np.argmax(self.fitness)
            # Elitism: keep the all-time best individual, not just the
            # current generation's best.
            if best > self.best.fitness:
                self.best = copy.deepcopy(self.population[bestIndex])
            self.avefitness = np.mean(self.fitness)
            self.trace[self.t, 0] = (1 - self.best.fitness) / self.best.fitness
            self.trace[self.t, 1] = (1 - self.avefitness) / self.avefitness
            print("Generation %d: optimal function value is: %f; average function value is %f" % (
                self.t, self.trace[self.t, 0], self.trace[self.t, 1]))

        print("Optimal function value is: %f;" % self.trace[self.t, 0])
        print("Optimal solution is:")
        print(self.best.chrom)
        self.printResult()

    def selectionOperation(self):
        """Roulette-wheel (fitness-proportionate) selection.

        The inner loop was truncated in the scraped source; reconstructed
        from the cumulative-probability array — TODO confirm against the
        original implementation.
        """
        newpop = []
        totalFitness = np.sum(self.fitness)
        accuFitness = np.zeros((self.sizepop, 1))

        # Build the cumulative selection-probability ladder.
        sum1 = 0.
        for i in range(self.sizepop):
            accuFitness[i] = sum1 + self.fitness[i] / totalFitness
            sum1 = accuFitness[i]

        for _ in range(self.sizepop):
            r = random.random()
            idx = 0
            for j in range(self.sizepop - 1):
                if j == 0 and r < accuFitness[j]:
                    idx = 0
                    break
                elif accuFitness[j] <= r < accuFitness[j + 1]:
                    idx = j + 1
                    break
            # NOTE(review): appends a shared reference; crossover/mutation
            # deep-copy before modifying, so duplicates are safe here.
            newpop.append(self.population[idx])
        self.population = newpop

    def crossoverOperation(self):
        """Arithmetic (blend) crossover on randomly drawn parent pairs.

        The blending arithmetic was truncated in the scraped source;
        reconstructed as the standard alpha-blend using params[2] —
        TODO confirm against the original implementation.
        """
        newpop = []
        for i in range(0, self.sizepop, 2):
            idx1 = random.randint(0, self.sizepop - 1)
            idx2 = random.randint(0, self.sizepop - 1)
            while idx2 == idx1:
                idx2 = random.randint(0, self.sizepop - 1)
            newpop.append(copy.deepcopy(self.population[idx1]))
            newpop.append(copy.deepcopy(self.population[idx2]))
            r = random.random()
            if r < self.params[0]:  # crossover rate
                crossPos = random.randint(1, self.vardim - 1)
                for j in range(crossPos, self.vardim):
                    newpop[i].chrom[j] = newpop[i].chrom[j] * self.params[2] + \
                        (1 - self.params[2]) * newpop[i + 1].chrom[j]
                    newpop[i + 1].chrom[j] = newpop[i + 1].chrom[j] * self.params[2] + \
                        (1 - self.params[2]) * newpop[i].chrom[j]
        self.population = newpop

    def mutationOperation(self):
        """Non-uniform mutation: perturbation shrinks as t approaches MAXGEN."""
        newpop = []
        for i in range(self.sizepop):
            newpop.append(copy.deepcopy(self.population[i]))
            r = random.random()
            if r < self.params[1]:  # mutation rate
                mutatePos = random.randint(0, self.vardim - 1)
                theta = random.random()
                # True division is intentional (self.t / self.MAXGEN); the
                # Python 2 original relied on it being float via random().
                if theta > 0.5:
                    # Move toward the lower bound.
                    newpop[i].chrom[mutatePos] = newpop[i].chrom[mutatePos] - \
                        (newpop[i].chrom[mutatePos] - self.bound[0, mutatePos]) * \
                        (1 - random.random() ** (1 - self.t / self.MAXGEN))
                else:
                    # Move toward the upper bound.
                    newpop[i].chrom[mutatePos] = newpop[i].chrom[mutatePos] + \
                        (self.bound[1, mutatePos] - newpop[i].chrom[mutatePos]) * \
                        (1 - random.random() ** (1 - self.t / self.MAXGEN))
        self.population = newpop

    def printResult(self):
        """Plot best and average objective-proxy values per generation."""
        x = np.arange(0, self.MAXGEN)
        y1 = self.trace[:, 0]
        y2 = self.trace[:, 1]
        plt.plot(x, y1, 'r', label='optimal value')
        plt.plot(x, y2, 'g', label='average value')
        plt.xlabel("Iteration")
        plt.ylabel("function value")
        plt.title("Genetic algorithm for function optimization")
        plt.legend()
        plt.show()

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值