Implementing logistic regression with TensorFlow
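For reference, the model built below is plain softmax (multinomial logistic) regression: for a flattened 784-dimensional image x, the logits are z = xW + b, the class probabilities are softmax(z)_i = exp(z_i) / sum_j exp(z_j), and training minimizes the batch-averaged cross-entropy -sum_i y_i * log softmax(z)_i against the one-hot label y, which is what tf.nn.softmax_cross_entropy_with_logits computes from the raw logits.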
import tensorflow as tf
import numpy as np
# MNIST loader bundled with TF 1.x (deprecated in later releases)
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets('./data/mnist', one_hot=True)
# Placeholders for a batch of flattened 28x28 images and their one-hot labels
x = tf.placeholder(dtype=tf.float32, shape=[None, 784], name='input')
y = tf.placeholder(dtype=tf.float32, shape=[None, 10], name='output')

# Parameters of a single linear layer: 784 inputs -> 10 classes
w = tf.Variable(tf.random_normal([784, 10], 0, 0.05, dtype=tf.float32), name='weights')
b = tf.Variable(tf.random_normal([1, 10], dtype=tf.float32), name='bias')

logits = tf.add(tf.matmul(x, w), b)
# Fraction of examples whose arg-max prediction matches the label
accuracy = tf.reduce_mean(tf.cast(tf.equal(tf.argmax(logits, 1), tf.argmax(y, 1)), tf.float32))

# Hyperparameters
lr = 0.01
epochs = 100
batch_size = 64

# Softmax cross-entropy computed directly from the raw logits, then averaged over the batch
loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=logits, labels=y), name='loss')
optimizer = tf.train.AdamOptimizer(learning_rate=lr).minimize(loss)
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    # Dump the graph so it can be inspected later in TensorBoard
    writer = tf.summary.FileWriter('./graphs/logistic_reg', sess.graph)
    n_batch = mnist.train.num_examples // batch_size
    for epoch in range(epochs):
        train_loss = 0
        train_acc = 0
        for _ in range(n_batch):
            xs, ys = mnist.train.next_batch(batch_size)
            _, batch_loss, batch_acc = sess.run([optimizer, loss, accuracy],
                                                feed_dict={x: xs, y: ys})
            train_loss += batch_loss
            train_acc += batch_acc
        # Report training and validation metrics every 10 epochs
        if epoch % 10 == 0:
            val_loss, val_acc = sess.run([loss, accuracy],
                                         feed_dict={x: mnist.validation.images,
                                                    y: mnist.validation.labels})
            print('epoch {} train loss = {}, train acc {}, the val loss = {}, val acc {}'
                  .format(epoch, train_loss / n_batch, train_acc / n_batch, val_loss, val_acc))
    # Final evaluation on the held-out test set
    test_loss, test_acc = sess.run([loss, accuracy],
                                   feed_dict={x: mnist.test.images, y: mnist.test.labels})
    print('the model on test loss is {}, the accuracy is {}'.format(test_loss, test_acc))
    writer.close()
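After a run, the graph written by FileWriter can be viewed with TensorBoard (assuming it is installed alongside TensorFlow): run tensorboard --logdir ./graphs/logistic_reg and open the URL it prints in a browser.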