Manually implement the common optimizers SGD, Momentum, AdaGrad, RMSProp, and Adam.
import math

# Test objective: every optimizer below minimizes f(a) = a ** 2, whose
# minimum is at a = 0, so func returns its gradient f'(a) = 2 * a.
def func(a):
    return 2 * a
# SGD
# w := w - lr * dw
def sgd(f_grad, lr=0.01, x_init=1, turns=20):
    results = [x_init]
    for i in range(turns):
        x_init -= lr * f_grad(x_init)  # step against the gradient
        results.append(x_init)
    return results
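
# Quick sanity check (hypothetical usage, not part of the original listing):
# with lr=0.1 each SGD step multiplies x by (1 - 2 * lr) = 0.8, so after
# 20 steps the iterate should sit near 0.8 ** 20 ~= 0.0115.
print(sgd(func, lr=0.1, x_init=1, turns=20)[-1])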
# Momentum
# v := upsilon * v + dw
# w := w - lr * v
def momentum(f_grad, lr=0.01, upsilon=0.9, x_init=1, turns=20):
    v = 0
    results = [x_init]
    for i in range(turns):
        v = upsilon * v + f_grad(x_init)  # exponentially decaying velocity
        x_init -= lr * v
        results.append(x_init)
    return results
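
# Hypothetical check: with upsilon=0.9 the accumulated velocity carries the
# iterate past the minimum at 0 (it goes slightly negative within 20 steps),
# a classic momentum overshoot that plain SGD at the same lr never shows.
print(momentum(func, lr=0.01, upsilon=0.9, x_init=1, turns=20)[-1])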
# AdaGrad
# s := s + dw ** 2
# w := w - lr * (1 / sqrt(s + eps)) * dw
def ada_grad(f_grad, lr=0.01, x_init=1, turns=20):
    s, eps = 0, 1e-6
    results = [x_init]
    for i in range(turns):
        g = f_grad(x_init)  # evaluate the gradient once per step
        s += g ** 2         # running sum of squared gradients shrinks the step over time
        x_init -= lr / math.sqrt(s + eps) * g
        results.append(x_init)
    return results
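
# The title also names RMSProp and Adam, but the original listing cuts off
# after AdaGrad. The two sketches below follow the same interface; the
# decay-rate defaults (gamma, beta1, beta2, eps) are assumptions taken from
# common practice, not from this document.

# RMSProp
# s := gamma * s + (1 - gamma) * dw ** 2
# w := w - lr * (1 / sqrt(s + eps)) * dw
def rms_prop(f_grad, lr=0.01, gamma=0.9, x_init=1, turns=20):
    s, eps = 0, 1e-6
    results = [x_init]
    for i in range(turns):
        g = f_grad(x_init)
        s = gamma * s + (1 - gamma) * g ** 2  # decaying average instead of AdaGrad's running sum
        x_init -= lr / math.sqrt(s + eps) * g
        results.append(x_init)
    return results

# Adam
# v := beta1 * v + (1 - beta1) * dw
# s := beta2 * s + (1 - beta2) * dw ** 2
# v_hat := v / (1 - beta1 ** t), s_hat := s / (1 - beta2 ** t)
# w := w - lr * v_hat / (sqrt(s_hat) + eps)
def adam(f_grad, lr=0.01, beta1=0.9, beta2=0.999, x_init=1, turns=20):
    v, s, eps = 0, 0, 1e-8
    results = [x_init]
    for t in range(1, turns + 1):  # t starts at 1 for bias correction
        g = f_grad(x_init)
        v = beta1 * v + (1 - beta1) * g       # first moment (momentum term)
        s = beta2 * s + (1 - beta2) * g ** 2  # second moment (RMSProp-style term)
        v_hat = v / (1 - beta1 ** t)          # bias-corrected moments
        s_hat = s / (1 - beta2 ** t)
        x_init -= lr * v_hat / (math.sqrt(s_hat) + eps)
        results.append(x_init)
    return results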