import matplotlib.pyplot as plt
import numpy as np
# gradient descent
# The vector collecting the partial derivatives of all variables is called the gradient.
def num_diff(f, x): # 求单变量数值微分(偏导数),差分求导方式
h = 1e-4
grad = np.zeros_like(x)
for i in range(x.size):
tmp = x[i]
# 差分求导 == 求微分 计算公式: f(x+h) - f(x-h) / 2 * h
x[i] = tmp + h
fxh1 = f(x)
x[i] = tmp - h
fxh2 = f(x)
grad[i] = (fxh1 - fxh2) / (2 * h)
x[i] = tmp # 将值还原
return grad
# gradient descent method
def grad_des(f, init_x, lr=0.1, step=100):
    """Minimize ``f`` by plain gradient descent starting from ``init_x``.

    Parameters
    ----------
    f : callable
        Scalar-valued objective taking an ndarray argument.
    init_x : np.ndarray
        Starting point. Left unmodified: a float copy is taken.
    lr : float, optional
        Learning rate (step size), default 0.1.
    step : int, optional
        Number of update iterations, default 100.

    Returns
    -------
    np.ndarray
        The point reached after ``step`` updates.
    """
    # Copy so the caller's array is not mutated: the original code aliased
    # init_x and clobbered it in place via ``x -= lr * grad``. The float
    # dtype also keeps the in-place update exact for integer starts.
    x = np.array(init_x, dtype=float)
    for _ in range(step):
        grad = num_diff(f, x)  # numerical gradient at the current point
        x -= lr * grad
    return x
# model
def model(x):
    """Quadratic bowl f(x) = x[0]^2 + x[1]^2, minimized at the origin."""
    x0, x1 = x[0], x[1]
    return x0 * x0 + x1 * x1
# Demo: minimize the quadratic bowl starting from (-3, 4); after 100 steps
# the result should be very close to the origin (0, 0).
# Guarded so importing this module does not run the demo as a side effect.
if __name__ == "__main__":
    init_x = np.array([-3.0, 4.0])
    res = grad_des(model, init_x)
    print(res)
# gradient descent (梯度下降法)
# (source: blog post last updated 2021-12-21 14:37:22)