炼数成金Tensorflow学习笔记之4.4_优化器
代码及分析
"""
Created on Sat Mar 21 13:53:17 2020
@author: 寒火qwer
"""
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data

# Load MNIST with one-hot labels (downloads into ./MNIST_data on first run).
# Forward slashes avoid any backslash-escape ambiguity on Windows paths.
mnist = input_data.read_data_sets('./MNIST_data', one_hot=True)

batch_size = 100
# Number of full batches per epoch (integer division drops the remainder).
n_batch = mnist.train.num_examples // batch_size

# Placeholders: flattened 28x28 images and one-hot digit labels.
x = tf.placeholder(tf.float32, [None, 784])
y = tf.placeholder(tf.float32, [None, 10])

# Single-layer softmax regression: logits p = x*w + b.
w = tf.Variable(tf.zeros([784, 10]))
b = tf.Variable(tf.zeros([10]))
p = tf.matmul(x, w) + b
prediction = tf.nn.softmax(p)

# Cross-entropy on the raw logits (the op applies softmax internally,
# which is numerically safer than log(softmax) by hand).
loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(labels=y, logits=p))

# Adam with learning rate 1e-2 (tutorial compares optimizers; default is 1e-3).
train_op = tf.train.AdamOptimizer(1e-2).minimize(loss)

init_op = tf.global_variables_initializer()

# Accuracy: fraction of samples where argmax(prediction) matches argmax(label).
correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(prediction, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))

with tf.Session() as sess:
    sess.run(init_op)
    for epoch in range(21):
        for batch in range(n_batch):
            batch_x, batch_y = mnist.train.next_batch(batch_size)
            sess.run(train_op, feed_dict={x: batch_x, y: batch_y})
        # Evaluate on the full test set once per epoch.
        acc = sess.run(accuracy, feed_dict={x: mnist.test.images, y: mnist.test.labels})
        print("iter" + str(epoch) + ", testing acc: " + str(acc))
tf.train.AdamOptimizer.__init__(learning_rate=0.001, beta1=0.9, beta2=0.999, epsilon=1e-08, use_locking=False, name='Adam')
- learning_rate: 张量或浮点值。学习速率
- beta1: 一个浮点值或一个常量浮点张量。一阶矩估计的指数衰减率
- beta2: 一个浮点值或一个常量浮点张量。二阶矩估计的指数衰减率
- epsilon: 数值稳定性的一个小常数
- use_locking: 如果为 True，更新操作使用 lock
- name: 应用梯度时创建的操作的可选名称，默认为 'Adam'
更多优化器（如 SGD、Momentum、RMSProp 等）可参见 TensorFlow 官方 tf.train 模块文档。