import tensorflow as tf
import matplotlib.pyplot as plt
# Demo: compare tf.train.exponential_decay with staircase=True (discrete
# drops every `decay_steps` steps) vs. staircase=False (continuous decay),
# alongside an exponential moving average of a scalar variable, over
# `global_steps` iterations. Results are plotted with matplotlib.
learning_rate = tf.Variable(initial_value=0.9, dtype=tf.float32)
learning_rate1 = tf.Variable(0.9, dtype=tf.float32)
decay_rate = 0.99      # multiplicative decay factor per decay period
global_steps = 1000    # total number of simulated training steps
decay_steps = 100      # period (in steps) of one decay interval
global_ = tf.placeholder(tf.int32)  # current step index, fed each iteration

# Staircase: learning rate drops in discrete jumps every decay_steps steps.
c = tf.train.exponential_decay(learning_rate, global_, decay_steps,
                               decay_rate, staircase=True)
# Smooth: learning rate decays continuously at every step.
d = tf.train.exponential_decay(learning_rate, global_, decay_steps,
                               decay_rate, staircase=False)

# EMA with decay 0.99 tracking learning_rate1; ema_apply must be run to
# update the shadow value that ema.average() reads.
ema = tf.train.ExponentialMovingAverage(0.99)
ema_apply = ema.apply([learning_rate1])

T_C = []  # staircase decay values per step
T_D = []  # smooth decay values per step
T_E = []  # EMA shadow values per step

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    # Reset the tracked variable to 0.85 so the EMA (initialized at 0.9)
    # visibly converges toward it over the run.
    sess.run(tf.assign(learning_rate1, 0.85))
    for i in range(global_steps):
        sess.run(ema_apply)  # advance the moving average one step
        T_c, T_d = sess.run([c, d], feed_dict={global_: i})
        _, T_e = sess.run([learning_rate1, ema.average(learning_rate1)])
        T_C.append(T_c)
        T_D.append(T_d)
        T_E.append(T_e)

# Plot all three curves: blue = staircase, red = smooth, green = EMA.
plt.figure(1)
plt.plot(range(global_steps), T_C, 'b-')
plt.plot(range(global_steps), T_D, 'r-')
plt.plot(range(global_steps), T_E, 'g-')
plt.axis([0, 1000, 0.6, 1])
plt.show()
# Second, standalone snippet: maintain an exponential moving average of a
# fed placeholder value and print how the shadow value converges toward it
# across repeated apply() calls.
import tensorflow as tf  # redundant when run in the same file as above; kept so this snippet is self-contained

v1 = tf.placeholder(tf.float64)
# decay = 0.99. If the optional second argument (num_updates) is supplied,
# early iterations converge faster, at the cost of a more complex
# (min(decay, (1+num_updates)/(10+num_updates))) decay schedule.
ema = tf.train.ExponentialMovingAverage(0.99)
maintain_average = ema.apply([v1])  # op that updates the shadow value from v1

with tf.Session() as sess:
    init = tf.global_variables_initializer()
    # feed_dict supplies v1 so the shadow variable's initializer can read it.
    sess.run(init, feed_dict={v1: 5})
    for i in range(10):
        # Each run updates the EMA with the fed value 5 and prints
        # [update result, fed value, current shadow average].
        print(sess.run([maintain_average, v1, ema.average(v1)],
                       feed_dict={v1: 5}))