tensorflow saved_model 与 pb(ckpt) 互相转换

ckpt ------> save_model

import tensorflow as tf
import sys

# Convert a TF1-style checkpoint into a SavedModel directory.
# Usage: python ckpt_to_saved_model.py <checkpoint_prefix> <export_dir>
trained_checkpoint_prefix = sys.argv[1]
export_dir = sys.argv[2]

graph = tf.Graph()
# BUG FIX: the original called bare tf.ConfigProto, which does not exist in
# TF2's top-level namespace -- the rest of the script already uses
# tf.compat.v1, so use it here too.
config = tf.compat.v1.ConfigProto(allow_soft_placement=True,
                                  log_device_placement=True)
with tf.compat.v1.Session(graph=graph, config=config) as sess:
    # Restore the graph structure from the .meta file, then the weights
    # from the checkpoint itself.
    loader = tf.compat.v1.train.import_meta_graph(
        trained_checkpoint_prefix + '.meta')
    loader.restore(sess, trained_checkpoint_prefix)

    # Export the restored session to a SavedModel, tagged for both
    # serving and further training.
    builder = tf.compat.v1.saved_model.builder.SavedModelBuilder(export_dir)
    builder.add_meta_graph_and_variables(
        sess,
        [tf.saved_model.TRAINING, tf.saved_model.SERVING],
        strip_default_attrs=True)
    builder.save()

使用ckpt转换成save_model不存在Variable为空的情况。(pb转换成save_model时候variable为空)
—————————————————————————————————————————————
ckpt ------> pb

import tensorflow as tf
from tensorflow.python import pywrap_tensorflow
import os
# reader=pywrap_tensorflow.NewCheckpointReader('./student_weight/model.ckpt-5')
# var_to_shape_map=reader.get_variable_to_shape_map()
# f1 = open('old_node.txt','w') 
# for key in var_to_shape_map:
#     print('tensor_name: ',key)
#     f1.write(key)
#     f1.write('\n')

# reader1=pywrap_tensorflow.NewCheckpointReader('./data_mrc_bert/model_v1/model.ckpt-250')
# var_to_shape_map1=reader1.get_variable_to_shape_map()
# f2= open('stu_node.txt','w') 
# for key in var_to_shape_map1:
#     print('tensor_name: ',key)
#     f2.write(key)
#     f2.write('\n')

import tensorflow as tf
from tensorflow.python.framework import graph_util
from tensorflow.python.platform import gfile
 
def freeze_graph(ckpt, output_graph):
    """Freeze a TF1 checkpoint into a single .pb GraphDef file.

    Args:
        ckpt: Checkpoint path prefix (expects ckpt + '.meta' to exist).
        output_graph: Destination path for the frozen .pb file.
    """
    # Comma-separated names of the output nodes to keep in the frozen graph.
    output_node_names = 'metrics/probabilities'

    # clear_devices drops the device placements recorded at training time so
    # the frozen graph can be loaded on any machine.
    saver = tf.compat.v1.train.import_meta_graph(ckpt + '.meta',
                                                 clear_devices=True)
    # BUG FIX: the original mixed compat.v1 calls with bare
    # tf.get_default_graph / tf.Session / tf.gfile, which are removed in
    # TF2; use the compat.v1 namespace consistently throughout.
    graph = tf.compat.v1.get_default_graph()
    input_graph_def = graph.as_graph_def()

    with tf.compat.v1.Session() as sess:
        saver.restore(sess, ckpt)
        # Replace Variable nodes with Const nodes holding the restored
        # values, keeping only what the output nodes depend on.
        output_graph_def = tf.compat.v1.graph_util.convert_variables_to_constants(
            sess=sess,
            input_graph_def=input_graph_def,
            output_node_names=output_node_names.split(','))
        with tf.compat.v1.gfile.GFile(output_graph, 'wb') as fw:
            fw.write(output_graph_def.SerializeToString())
        print('{} ops in the final graph.'.format(len(output_graph_def.node)))
# Checkpoint path prefix; the global step number is appended per iteration.
ckpt = './student_weight_3soft/model.ckpt-'

if __name__ == '__main__':
    # Freeze checkpoints for steps 6 through 10, writing one .pb alongside
    # each checkpoint prefix.
    for step in range(6, 11):
        ckpt_path = '{}{}'.format(ckpt, step)
        freeze_graph(ckpt_path, ckpt_path + '.pb')

—————————————————————————————————————————————

save_model --------> pb

from tensorflow.python.framework.graph_util import convert_variables_to_constants
import tensorflow as tf

# Convert a SavedModel directory into a single frozen .pb GraphDef.
# BUG FIX: the original used bare tf.Session / tf.saved_model.loader /
# tf.gfile (removed in TF2) and never closed the session; use the
# compat.v1 namespace and a context manager so the session is released.
with tf.compat.v1.Session() as sess:
    # Load the SavedModel tagged "serve" into this session's graph.
    tf.compat.v1.saved_model.loader.load(sess, ["serve"], "./1634545685")
    # 获取输入输出的tensor_name,而这个是在定义自己的operation的时候指定的
    # (The output tensor names are whatever the operations were named when
    # the SavedModel was exported.)
    output_graph_def = convert_variables_to_constants(
        sess, sess.graph_def,
        output_node_names=['metrics/probabilities'])
    with tf.compat.v1.gfile.FastGFile('./new_pb.pb', mode='wb') as f:
        f.write(output_graph_def.SerializeToString())

—————————————————————————————————————————————

pb --------> save_model(这里variable为空,网上也有许多错误代码,建议还是用ckpt转save_model)

  • 1
    点赞
  • 5
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值