# define model ...
# Launch a session to compute the graph and write it out for TensorBoard.
with tf.Session() as sess:
    # FileWriter serializes sess.graph to ./graphs so TensorBoard can render it.
    writer = tf.summary.FileWriter("./graphs", sess.graph)
    for step in range(training_steps):
        sess.run([optimizer])
    writer.close()  # flush pending events to disk — without this the log may be empty

# To inspect, go to a terminal and run:
#   $ python [yourprogram].py
#   $ tensorboard --logdir="./graphs" --port 6006
# Then open your browser and go to: http://localhost:6006/
2. Saving and Restoring Variables
# checkpoint: a non-trainable counter of optimizer steps, saved with the model
# so training can resume from where it left off.
global_step = tf.Variable(0, dtype=tf.int32, trainable=False, name="global_step")
# Passing global_step to minimize() makes the optimizer increment it each step.
train_step = tf.train.AdamOptimizer(1e-4).minimize(cross_entropy,
                                                   global_step=global_step)
saver = tf.train.Saver()  # by default saves/restores all variables

# Launch a session to compute the graph.
with tf.Session() as sess:
    for step in range(training_steps):
        sess.run([train_step])  # fixed: original ran undefined `train_op`
        if (step + 1) % 300 == 0:  # fixed: original tested undefined `i`
            # Checkpoint filenames get the current global_step appended.
            saver.save(sess, './checkpoints/ckpt', global_step=global_step)
# define model ...
# Launch a session, restore the latest checkpoint if one exists, then train.
with tf.Session() as sess:
    # get_checkpoint_state reads the `checkpoint` index file in ./checkpoints.
    ckpt = tf.train.get_checkpoint_state(os.path.dirname('./checkpoints/ckpt'))
    if ckpt and ckpt.model_checkpoint_path:  # fixed: guard was fused into a comment,
        # so restore() would crash on a fresh run with no checkpoint present.
        saver.restore(sess, ckpt.model_checkpoint_path)
    for step in range(training_steps):
        sess.run([optimizer])
3. Visualize our summary statistics during our training
# define model ...
# Group the summary ops under one name scope so they cluster in the graph view.
with tf.name_scope("summaries"):
    tf.summary.image('input', x_image, 4)       # log 4 example input images
    tf.summary.scalar("accuracy", accuracy)
    tf.summary.histogram("loss", cross_entropy)
# Merge every summary op in the graph into a single op to run per step.
summary_op = tf.summary.merge_all()

# Launch a session to compute the graph.
with tf.Session() as sess:
    writer = tf.summary.FileWriter("./graphs", sess.graph)
    for step in range(training_steps):
        # Fixed: run the train op and the merged summaries in ONE call sharing
        # one feed_dict — the original ran train_op with no feed (unfed
        # placeholders) and did a second, redundant forward pass for summaries.
        _, summary = sess.run([train_op, summary_op],
                              feed_dict={x: batch[0], y_: batch[1],
                                         keep_prob: 0.5})
        writer.add_summary(summary, global_step=step)  # fixed: was undefined `i`
    writer.close()  # flush events so TensorBoard sees the final steps