# 《深度学习的数学》 梯度下降法 Excel版
# 《深度学习的数学》 梯度下降法 Python版
from sympy import *
import time
def gradient_descent(init_x, init_y, lr, steps=30):
    """Minimize z = x**2 + y**2 by gradient descent, printing each step.

    Parameters
    ----------
    init_x, init_y : starting coordinates of the descent.
    lr : learning rate (scales each step along the negative gradient).
    steps : number of iterations (default 30, matching the book's table).

    Returns
    -------
    The final (x, y) point after `steps` iterations.
    """
    x = symbols("x")
    y = symbols("y")
    z = x**2 + y**2
    # Symbolic partial derivatives: dz/dx = 2x, dz/dy = 2y.
    dif_x = diff(z, x)
    dif_y = diff(z, y)
    for no in range(steps):
        # Snapshot the current point so every printed column of this row
        # refers to the same position.
        x_val = init_x
        y_val = init_y
        # Gradient evaluated at the current point (symbol-based subs is
        # less fragile than the original string-based subs("x", ...)).
        dif_x_val = dif_x.subs(x, x_val)
        dif_y_val = dif_y.subs(y, y_val)
        # Displacement vector = -lr * gradient.
        offset_x = -lr * dif_x_val
        offset_y = -lr * dif_y_val
        init_x = init_x + offset_x
        init_y = init_y + offset_y
        z_val = round(z.evalf(subs={x: x_val, y: y_val}), 2)
        # BUG FIX: the original printed the UPDATED init_x/init_y next to a
        # gradient and function value computed at the PREVIOUS point, so each
        # row mixed two different positions. Print the current point instead.
        print("No:", no, "x值:", round(x_val, 2), "y值:", round(y_val, 2),
              "x梯度:", round(dif_x_val, 2), "y梯度:", round(dif_y_val, 2),
              "x位移向量:", round(offset_x, 2), "y位移向量:", round(offset_y, 2),
              "函数值:", z_val)
        time.sleep(0.1)  # demo pacing: slow the table output for readability
    return init_x, init_y
if __name__ == "__main__":
    # Starting point (3, 2) with learning rate 0.1 — the book's worked example.
    # Guarded so importing this module does not trigger the 30-step printout;
    # renamed from bare x/y to avoid shadowing the symbolic variables' names.
    start_x = 3
    start_y = 2
    learning_rate = 0.1
    gradient_descent(start_x, start_y, learning_rate)
# 运行结果: