Notice: All rights reserved. If you repost, please contact the author and credit the source.
import tensorflow as tf
import numpy as np
from tensorflow.examples.tutorials.mnist import input_data
def init_weights(shape):
    return tf.Variable(tf.random_normal(shape, stddev=0.01))

def model(X, W):
    # note that this is the same model as in linear regression: TensorFlow
    # provides a fused cost function that applies softmax and computes the
    # cross entropy in one op (a quick numerical check of this equivalence
    # appears at the end of the post)
    return tf.matmul(X, W)
mnist = input_data.read_data_sets("/tmp/data", one_hot=True)
train_X, train_Y, test_X, test_Y = mnist.train.images, mnist.train.labels, mnist.test.images, mnist.test.labels
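# train_X has shape (55000, 784) and train_Y shape (55000, 10) with one-hot
# labels; the test set holds 10000 examples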
X = tf.placeholder("float", [None, 784]) # create symbolic variables
Y = tf.placeholder("float", [None, 10])
W = init_weights([784, 10]) # like in linear regression, we need a shared variable weight matrix for logistic regression
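# 784 = 28*28 flattened pixels per image, 10 = one output per digit class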
py_x = model(X, W)
# define the cost function: mean cross entropy over the batch (softmax is applied internally)
cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=py_x, labels=Y))
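# i.e. for each example, H(y, softmax(py_x)) = -sum_j y_j * log(softmax(py_x)_j),
# averaged over the batch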
# construct optimizer
train_op = tf.train.GradientDescentOptimizer(0.05).minimize(cost)
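# minimize() adds both the gradient ops and the weight-update op to the graph;
# 0.05 is the learning rate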
# Launch the graph in a session
with tf.Session() as sess:
    # you need to initialize all variables
    tf.global_variables_initializer().run()

    for i in range(100):
        # train on mini-batches of 128 examples
        for start, end in zip(range(0, len(train_X), 128), range(128, len(train_X)+1, 128)):
            sess.run(train_op, feed_dict={X: train_X[start:end], Y: train_Y[start:end]})
        # after each epoch, report accuracy on the test set
        print(i, np.mean(np.argmax(test_Y, axis=1) ==
                         sess.run(tf.argmax(py_x, 1), feed_dict={X: test_X})))
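
# As a sanity check on the fused loss used above, here is a minimal sketch
# (the constant logits and labels below are made up for illustration): it
# compares tf.nn.softmax_cross_entropy_with_logits against a hand-rolled
# softmax followed by cross entropy; both should print roughly 0.417
logits = tf.constant([[2.0, 1.0, 0.1]])  # scores for one 3-class example
labels = tf.constant([[1.0, 0.0, 0.0]])  # one-hot ground truth
fused = tf.nn.softmax_cross_entropy_with_logits(logits=logits, labels=labels)
manual = -tf.reduce_sum(labels * tf.log(tf.nn.softmax(logits)), axis=1)
with tf.Session() as sess:
    print(sess.run(fused), sess.run(manual))  # the two agree up to float error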