Steps:
1 Data preparation: the data can be read directly from
'/tmp/tensorflow/mnist/input_data'
(it is downloaded there automatically on the first run)
2 Build the model: the input x, the weights W, and the bias b
3 Define the loss function and the optimizer (using a softmax classifier)
4 Start a Session
5 Train the model
6 Test and report the accuracy
For the theory behind the softmax classifier, see: Softmax Regression. A short numeric illustration follows.
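As a quick illustration (a minimal NumPy sketch, not part of the program below), softmax turns a vector of raw scores into a probability distribution:

import numpy as np

def softmax(z):
    # Shift by the max for numerical stability; the result is unchanged
    e = np.exp(z - np.max(z))
    return e / e.sum()

print(softmax(np.array([2.0, 1.0, 0.1])))  # ~[0.659, 0.242, 0.099] -- sums to 1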
Program:
# TensorFlow handwritten digit recognition (MNIST)
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data

# Import data (downloaded automatically on the first run)
data_dir = '/tmp/tensorflow/mnist/input_data'
mnist = input_data.read_data_sets(data_dir, one_hot=True)
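# one_hot=True encodes each label as a length-10 vector,
# e.g. the digit 3 becomes [0, 0, 0, 1, 0, 0, 0, 0, 0, 0]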
# Create the model (x, W, b)
# Each image is 28*28 = 784 pixels, flattened into a vector
# None means the first dimension (the batch size) can be of any length
x = tf.placeholder(tf.float32, [None, 784])
# Weights and bias, initialized to zeros
W = tf.Variable(tf.zeros([784, 10]))
b = tf.Variable(tf.zeros([10]))
# Prediction (unnormalized logits)
y = tf.matmul(x, W) + b
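# y has shape [None, 10]: one unnormalized score (logit) per digit class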
# Define loss and optimizer
y_ = tf.placeholder(tf.float32, [None, 10])
# The raw formulation of cross-entropy,
#
# tf.reduce_mean(-tf.reduce_sum(y_ * tf.log(tf.nn.softmax(y)),
# reduction_indices=[1]))
# tf.reduce_sum adds the elements in the second dimension of y,
# due to the reduction_indices=[1] parameter.
# tf.reduce_mean computes the mean over all the examples in the batch.
#
# can be numerically unstable.
#
# So here we use tf.nn.softmax_cross_entropy_with_logits on the raw
# outputs of 'y', and then average across the batch.
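# (For instance, a very large logit makes tf.exp overflow to inf inside
# softmax, and the subsequent tf.log can then produce nan or -inf;
# the fused op sidesteps this.)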
loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(labels=y_, logits=y))
optimizer = tf.train.GradientDescentOptimizer(0.5).minimize(loss)
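# Gradient descent with learning rate 0.5: each run of `optimizer`
# nudges W and b one step in the direction that reduces the loss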
# Start a Session
sess = tf.InteractiveSession()
# Initialize the variables
tf.global_variables_initializer().run()
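# InteractiveSession installs itself as the default session, which is why
# .run() above works without passing the session explicitly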
# Train -- stochastic training
for _ in range(100):
    # 100 examples are randomly drawn from the training set per step
    batch_x, batch_y = mnist.train.next_batch(100)
    # Training step
    sess.run(optimizer, feed_dict={x: batch_x, y_: batch_y})
# Test
# tf.equal: True where the prediction matches the label, else False
# tf.argmax(..., 1): index of the largest value along axis 1 (per row)
correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))
# tf.cast: convert the booleans to tf.float32 so they can be averaged
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
print(sess.run(accuracy, feed_dict={x: mnist.test.images,
                                    y_: mnist.test.labels}))
Result: 0.8868
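As an optional sanity check (a minimal sketch reusing the session and tensors from the program above, not part of the original code), you can look at the predicted digit for a single test image:

# Inspect the model's prediction for the first test image
predicted = sess.run(tf.argmax(y, 1), feed_dict={x: mnist.test.images[:1]})
print(predicted)  # an array with one class index, e.g. [7]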