# 1. Training (训练)
# Softmax cross-entropy between the logits `y` and the one-hot `labels`.
# BUG FIX: the original passed `y` positionally; in TF1 the first positional
# parameter of softmax_cross_entropy_with_logits is a `_sentinel` that rejects
# positional use — logits MUST be passed by keyword.
loss = tf.nn.softmax_cross_entropy_with_logits(logits=y, labels=labels)
# Total (summed) loss over the batch.
cost = tf.reduce_sum(loss)
# Alternative, kept for reference: step-decayed learning rate (×0.1 every
# `num_minibatches * 40` steps) with either SGD or Adam.
# global_steps = tf.Variable(0)
# learning_rate = tf.train.exponential_decay(learning_rate_orig, global_steps, num_minibatches * 40, 0.1, staircase=True)
# train = tf.train.AdamOptimizer(learning_rate).minimize(cost)
# Plain SGD on the summed loss; `learning_rate` is assumed defined earlier — TODO confirm.
train = tf.train.GradientDescentOptimizer(learning_rate).minimize(cost)
# 2. Testing (测试)
# Accuracy op: fraction of examples whose argmax over the softmaxed logits
# (model.fc1) matches the argmax of the one-hot labels.
# BUG FIX: the original used curly quotes (“test”) — a Python syntax error —
# and the two statements were not indented inside the `with` block.
with tf.name_scope("test"):
    correct_predict = tf.equal(tf.argmax(tf.nn.softmax(model.fc1), 1), tf.argmax(labels, 1))
    # Cast booleans to float32 and average → accuracy in [0, 1].
    accuracy = tf.reduce_mean(tf.cast(correct_predict, tf.float32))
# 3. Saving the model (保存模型)
# Persist all graph variables of the current session to `checkpoint_path`;
# `global_step=i` suffixes the checkpoint files with the training step.
# (Removed the stray space before the closing parenthesis.)
save = tf.train.Saver()
save.save(sess, checkpoint_path, global_step=i)
# 4. Loading the model (加载模型)
# Way 1 to pick the variables to restore: take everything EXCEPT the scopes
# we retrain from scratch (the Inception head layers).
# BUG FIX: the original used curly quotes (‘…’) — a Python syntax error —
# and listed 'AuxLogits' twice; the duplicate is removed.
exclude = ['Mixed_7c', 'Mixed_7b', 'AuxLogits', 'Logits', 'Predictions']
variables_to_restore = slim.get_variables_to_restore(exclude=exclude)
# Way 2: enumerate the variable collections directly.
var1 = tf.global_variables()  # all global (trainable + non-trainable) variables
var2 = tf.local_variables()   # all local variables
# Restore only the selected subset from the checkpoint at `model_path`.
saver = tf.train.Saver(variables_to_restore)
saver.restore(sess, model_path)
# A fresh full-graph Saver (covers every variable) for subsequent saves.
save = tf.train.Saver()