import tensorflow as tf  # TF 1.x; `sess`, `saver`, and `modelB` are assumed to be set up earlier

path_checkpointa = r'E:\anacondaCode\ResNet_cifar-master181010(2000) - 副本\data\cifar10_tf\checkpoint_'
ckpt = tf.train.get_checkpoint_state(path_checkpointa)  # checkpoint of model A
if ckpt and ckpt.model_checkpoint_path:
    saver.restore(sess, ckpt.model_checkpoint_path)
    print('restore success')
# Re-initialize the logit layer (weights, biases, and their Momentum slots)
# plus the step counter, so the final layer is trained from scratch.
# Note: tf.variables_initializer expects Variable objects, not name strings.
var_list = [v for v in tf.global_variables() if v.name.startswith('logit/')]
var_list.append(modelB.global_step)
initfc = tf.variables_initializer(var_list, name='init')
sess.run(initfc)
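
Both approaches depend on matching the exact variable names stored in the checkpoint, including optimizer slot variables such as logit/DW/Momentum. A minimal sketch for inspecting those names, assuming TF 1.x and the `ckpt` loaded above:

# List every variable saved in model A's checkpoint with its shape,
# so the 'logit' scope names can be matched exactly.
for name, shape in tf.train.list_variables(ckpt.model_checkpoint_path):
    print(name, shape)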
# Approach above: load the whole model, then re-initialize selected layers.
# Approach below: load the model, excluding the logit layer.
path_checkpointa = r'E:\anacondaCode\ResNet_cifar-master181010(2000) - 副本\data\cifar10_tf\checkpoint_'
ckpt = tf.train.get_checkpoint_state(path_checkpointa)
if ckpt and ckpt.model_checkpoint_path:
    # Restore everything except the variables in the 'logit' (output) scope.
    variables = tf.contrib.framework.get_variables_to_restore()
    variables_to_restore = [v for v in variables if v.name.split('/')[0] != 'logit']
    saver = tf.train.Saver(variables_to_restore)
    saver.restore(sess, ckpt.model_checkpoint_path)
    print('restore success')
    # Initialize the excluded logit variables. tf.initialize_variables is
    # deprecated, and the init op must actually be run in the session.
    var_to_init = [v for v in variables if v.name.split('/')[0] == 'logit']
    sess.run(tf.variables_initializer(var_to_init))
Loading every parameter except the final logit layer works as shown above (personally tested). However, a later saver.save with this same Saver will also omit the logit parameters, so this Saver cannot be used to save model B in full.
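
To save model B with the logit layer included, one option is a second Saver built without a var_list, which defaults to all saveable variables. A minimal sketch; the output path here is hypothetical:

# A Saver constructed with no var_list covers all saveable variables,
# including the freshly initialized logit layer.
saver_all = tf.train.Saver()
save_path = saver_all.save(sess, r'E:\anacondaCode\modelB\model.ckpt')  # hypothetical path
print('model B saved to', save_path)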