# TensorFlow machine-learning basic template (logistic regression)
import tensorflow as tf
import os
# Model parameters for a 5-feature logistic-regression model.
# W: [5, 1] weight matrix, zero-initialized.
W = tf.Variable(tf.zeros([5, 1]), name='weight')
# b: scalar bias. Must be a float (0.) — an int32 variable cannot be
# added to the float32 result of matmul(X, W) without a dtype error.
b = tf.Variable(0., name='bias')
def combine_inputs(X):
    """Return the linear combination X @ W + b (the model's logits).

    Args:
        X: float tensor of input features; assumed shape [batch, 5]
           so it conforms with the [5, 1] weight matrix — TODO confirm
           against the (unseen) inputs() pipeline.

    Returns:
        Tensor of shape [batch, 1] with one logit per example.
    """
    # tf.matmul, not the removed tf.mul: a linear model needs a
    # matrix product with the [5, 1] weights, not element-wise scaling.
    return tf.matmul(X, W) + b
def inference(X):
    """Map features to predicted probabilities in (0, 1).

    Applies the logistic sigmoid to the linear logits, yielding the
    model's estimate of P(label == 1) for each example.
    """
    return tf.sigmoid(combine_inputs(X))
def loss(X, Y):
    """Mean sigmoid cross-entropy between model logits and labels Y.

    Uses the numerically stable fused op on raw logits (NOT on the
    sigmoid output — sigmoid_cross_entropy_with_logits applies the
    sigmoid internally).
    """
    # TF >= 1.0 requires keyword arguments here; the old positional
    # form raises a ValueError.
    return tf.reduce_mean(
        tf.nn.sigmoid_cross_entropy_with_logits(
            logits=combine_inputs(X), labels=Y))
def train(total_loss):
    """Build and return a gradient-descent training op for total_loss.

    The learning rate is intentionally tiny (1e-5); tune as needed.
    """
    learning_rate = 0.00001
    # Correct class name: GradientDescentOptimizer
    # (original had the typo 'GraidentDescendOptimizer').
    return tf.train.GradientDescentOptimizer(learning_rate).minimize(total_loss)
def evaluate(X, Y=None):
    """Threshold predictions at 0.5; optionally score against labels.

    Args:
        X: feature tensor.
        Y: optional 0/1 label tensor. When omitted (the original
           single-argument contract), the raw class predictions are
           returned unchanged.

    Returns:
        If Y is None: float32 tensor of 0./1. class predictions.
        Otherwise: scalar accuracy tensor (fraction of matches) —
        this supports the evaluate(X, Y) call in the training script.
    """
    # 'inference', not the original typo 'interence' (NameError).
    predicted = tf.cast(inference(X) > 0.5, tf.float32)
    if Y is None:
        return predicted
    return tf.reduce_mean(tf.cast(tf.equal(predicted, Y), tf.float32))
# Training driver: build the graph, run gradient descent, evaluate.
with tf.Session() as sess:  # lowercase 'with' — 'With' is a SyntaxError
    # Correct initializer name for TF >= 1.0
    # (original 'tf.initial_all_variables' never existed).
    tf.global_variables_initializer().run()

    # NOTE(review): inputs() is not defined anywhere in this file —
    # presumably supplied by a data-pipeline module; verify before running.
    X, Y = inputs()
    total_loss = loss(X, Y)
    train_op = train(total_loss)

    # Start the input-queue threads that feed X and Y.
    # 'tf.train.start_queue_runners' (a function in tf.train), not the
    # original typo 'tf.train_start_queue_runners'.
    coord = tf.train.Coordinator()
    threads = tf.train.start_queue_runners(sess=sess, coord=coord)

    training_steps = 1000
    for step in range(training_steps):
        sess.run([train_op])

    evaluate(X, Y)

    # Shut down the queue-runner threads cleanly (the original leaked
    # them, which can hang the process on exit).
    coord.request_stop()
    coord.join(threads)