废话不多说,放几张图上来。
找到 Himmelblau 函数的极小值点,用机器学习中常用的优化方法,简单来说就是利用梯度下降不断更新自变量 x。代码如下:
# Third-party imports (fixed typos: mpl_tookkits -> mpl_toolkits,
# matplotlibs -> matplotlib; the original names do not exist and raise
# ModuleNotFoundError).
import numpy as np
import torch
from matplotlib import pyplot as plt
from mpl_toolkits.mplot3d import Axes3D  # noqa: F401 — registers the '3d' projection
def himmelblau(x):
    """Himmelblau's function: f(x, y) = (x² + y − 11)² + (x + y² − 7)².

    A classic multi-modal optimization benchmark with four identical
    local minima of value 0, e.g. at (3.0, 2.0).

    Args:
        x: a length-2 sequence [x0, x1]; elements may be scalars,
           NumPy arrays, or torch tensors (the arithmetic broadcasts).

    Returns:
        The function value, same type/shape as the inputs.
    """
    # Fixed: the first constant must be 11 (the source mistyped it as 1);
    # with 1 this is not Himmelblau's function and (3, 2) is not a minimum.
    return (x[0] ** 2 + x[1] - 11) ** 2 + (x[0] + x[1] ** 2 - 7) ** 2
# Sample the (x, y) plane on a regular grid and draw the 3-D surface of
# Himmelblau's function so the four minima are visible.
x = np.arange(-6, 6, 0.1)
y = np.arange(-6, 6, 0.1)
print('x,y range:', x.shape, y.shape)
X, Y = np.meshgrid(x, y)
print('X,Y maps:', X.shape, Y.shape)  # fixed typo: X.shpe -> X.shape
Z = himmelblau([X, Y])

fig = plt.figure('himmelblau')
# fig.gca(prediction='3D') is invalid (wrong kwarg, and gca() no longer
# accepts projection args); create the 3-D axes explicitly instead.
ax = fig.add_subplot(projection='3d')
# Plot the computed surface Z — the original passed the literal 2 by mistake.
ax.plot_surface(X, Y, Z)
ax.view_init(60, -30)
ax.set_xlabel('x')
ax.set_ylabel('y')
plt.show()
# Start from the origin and let Adam walk downhill on Himmelblau's surface.
x = torch.tensor([0., 0.], requires_grad=True)
opt = torch.optim.Adam([x], lr=1e-3)

for it in range(20000):
    loss = himmelblau(x)

    opt.zero_grad()   # clear gradients accumulated in the previous iteration
    loss.backward()   # populate x.grad with d(loss)/dx
    opt.step()        # Adam update of x in place

    if it % 2000 == 0:
        # NOTE: loss is the value *before* this iteration's parameter update.
        print('step {}: x={}, f(x) = {}'.format(it, x.tolist(), loss.item()))
这位作者讲的内容不错,值得参考。