Code
from tensorflow.examples.tutorials.mnist import input_data
import tensorflow as tf
import matplotlib as mp
import matplotlib.pyplot as plot
mnist = input_data.read_data_sets("MnisData", one_hot=True)

# Convolution kernel: 5x5 spatial window, 1 input channel (grayscale MNIST),
# 3 output feature maps.  Renamed from `filter`, which shadows the builtin.
conv_kernel = tf.Variable(tf.random_normal(shape=[5, 5, 1, 3], stddev=1))

# Network inputs: batches of 28x28x1 images and their one-hot digit labels.
inputdataPlaceHolder = tf.placeholder(dtype=tf.float32, shape=[None, 28, 28, 1])
LabelPlaceHolder = tf.placeholder(dtype=tf.float32, shape=[None, 10])

# Network structure: one convolution, a channel sum, then three dense layers.
level1 = tf.nn.conv2d(inputdataPlaceHolder, conv_kernel, strides=[1, 1, 1, 1], padding='SAME')
# Collapse the 3 feature maps into a single 28x28 map by summing over the
# channel axis (axis 3), then flatten to a 784-vector for the dense layers.
level1 = tf.reduce_sum(level1, 3)
level1 = tf.reshape(level1, [-1, 784])
level2 = tf.layers.dense(level1, 250, activation=tf.nn.sigmoid, kernel_initializer=tf.random_normal_initializer(stddev=0.1))
level3 = tf.layers.dense(level2, 300, activation=tf.nn.sigmoid, kernel_initializer=tf.random_normal_initializer(stddev=0.1))
output = tf.layers.dense(level3, 10, activation=tf.nn.sigmoid, kernel_initializer=tf.random_normal_initializer(stddev=0.1))

# Softmax layer: normalize the 10 class activations into a probability
# distribution.
output = tf.nn.softmax(output)

# NOTE(review): mean-squared error on softmax outputs gives weak gradients;
# tf.nn.softmax_cross_entropy_with_logits_v2 on the pre-softmax activations
# is the standard choice.  Kept as-is to preserve training behavior.
loss = tf.reduce_mean(tf.square(output - LabelPlaceHolder))

# Learning-rate schedule: start at 1e-3, decay by a factor of 0.9 every
# 1000 steps, smoothly (staircase=False).  `global_step` is advanced by
# the optimizer's minimize() call below.
global_step = tf.Variable(0)
learning_rate = tf.train.exponential_decay(1e-3, global_step, 1000, 0.9, staircase=False)
train = tf.train.AdamOptimizer(learning_rate).minimize(loss, global_step=global_step)
# History of (step, loss) samples for the live loss-curve plot.
# (Renamed from the misspelled `setpAry`/`lossAry`.)
step_history = []
loss_history = []
saver = tf.train.Saver()
with tf.Session() as sess:
    tf.global_variables_initializer().run()
    # Resume from the most recent checkpoint if one exists.  The original
    # unconditionally restored "save/data-1800", which crashes on a fresh
    # run; a missing checkpoint now simply starts from the freshly
    # initialized variables.
    checkpoint = tf.train.latest_checkpoint("save")
    if checkpoint:
        saver.restore(sess, checkpoint)
    # Train until the loss sampled on a single test example drops below 1e-6.
    while True:
        data, label = mnist.train.next_batch(1000)
        data = data.reshape([-1, 28, 28, 1])
        _, step = sess.run([train, global_step], feed_dict={
            inputdataPlaceHolder: data,
            LabelPlaceHolder: label,
        })
        if step % 100 == 0:
            # Every 100 steps: evaluate one test example, record and plot
            # the loss, print the prediction, and checkpoint the model.
            data, label = mnist.test.next_batch(1)
            data = data.reshape([-1, 28, 28, 1])
            lossTemp, Temp = sess.run([loss, output], feed_dict={
                inputdataPlaceHolder: data,
                LabelPlaceHolder: label})
            step_history.append(step)
            loss_history.append(lossTemp * 100)  # scaled x100 for visibility
            print(lossTemp)
            # Use the `plot` alias imported at the top of the file
            # (the original mixed `mp.pyplot.*` and the alias).
            plot.clf()
            plot.cla()
            plot.plot(step_history, loss_history)
            # NOTE(review): plot.show() blocks until the window is closed,
            # pausing training each time; plot.pause(0.01) would refresh
            # without interaction.  Kept to preserve behavior.
            plot.show()
            plot.close()
            print(Temp)
            print(label)
            print("-----------------")
            saver.save(sess, "save/data", global_step=step)
            if lossTemp < 1e-6:
                break
Accuracy
mnist.validation: 97.88%
mnist.test: 97.55%
mnist.train: 99.69%