转载自:https://blog.csdn.net/u013555719/article/details/79334359#commentBox
# 初始的学习速率是0.1,总的迭代次数是1000次,如果staircase=True,那就表明每decay_steps次计算学习速率变化,更新原始学习速率,
# 如果是False,那就是每一步都更新学习速率。红色表示False,蓝色表示True。
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
# Visualize TF1 learning-rate decay schedules: the red curve is the
# continuously-updated rate (staircase=False), the blue curve is the
# stepwise rate updated every `decay_steps` steps (staircase=True).
learning_rate = 0.1   # initial learning rate
decay_rate = 0.96     # decay rate
steps = 1000          # total number of iterations to plot
decay_steps = 100     # decay period (steps between staircase updates)

# Graph-mode step counter. NOTE(review): feeding a tf.Variable via
# feed_dict (below) works in TF1 but is deprecated; a tf.placeholder
# would be the canonical choice.
global_step = tf.Variable(0, trainable=False)

# Exponential decay (the schedule described in the header comment):
#   decayed_lr = learning_rate * decay_rate ** (global_step / decay_steps)
c = tf.train.exponential_decay(learning_rate, global_step, decay_steps, decay_rate, staircase=True)
d = tf.train.exponential_decay(learning_rate, global_step, decay_steps, decay_rate, staircase=False)

# --- Alternative schedules -------------------------------------------------
# The original blog post reassigned c/d for each schedule below, which made
# the earlier assignments dead code and plotted piecewise_constant against
# natural_exp_decay instead of the advertised exponential pair.  They are
# kept here as commented-out alternatives: uncomment one pair to plot it.
#
# Inverse time decay: decayed_lr = learning_rate / (1 + decay_rate * t)
# c = tf.train.inverse_time_decay(learning_rate, global_step, decay_steps, decay_rate, staircase=True)
# d = tf.train.inverse_time_decay(learning_rate, global_step, decay_steps, decay_rate, staircase=False)
#
# Natural exponential decay: decayed_lr = learning_rate * exp(-decay_rate * global_step)
# c = tf.train.natural_exp_decay(learning_rate, global_step, decay_steps, decay_rate, staircase=True)
# d = tf.train.natural_exp_decay(learning_rate, global_step, decay_steps, decay_rate, staircase=False)
#
# Piecewise-constant decay: piecewise_constant(x, boundaries, values)
# boundaries = [200, 400, 600, 800]
# values = [0.1, 0.05, 0.01, 0.005, 0.0001]
# c = tf.train.piecewise_constant(global_step, boundaries, values)
# ---------------------------------------------------------------------------

T_C = []  # staircase=True rates, one per step
F_D = []  # staircase=False rates, one per step
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for i in range(steps):
        # Evaluate both schedules in a single run call at step i.
        T_c, F_d = sess.run([c, d], feed_dict={global_step: i})
        T_C.append(T_c)
        F_D.append(F_d)

plt.figure(1)
plt.plot(range(steps), F_D, 'r-')  # red solid line: staircase=False (smooth)
plt.plot(range(steps), T_C, 'b-')  # blue solid line: staircase=True (stepwise)
plt.show()