# Create several sessions and graphs, run a different graph in each session, then close the sessions.
# Build two independent graphs, each paired with its own dedicated session.
graph1 = tf.Graph()
sess1 = tf.Session(graph=graph1)

graph2 = tf.Graph()
sess2 = tf.Session(graph=graph2)
# Restore the first (original) checkpoint into graph1 and read out its
# word-embedding matrix as a NumPy array for the comparison below.
# NOTE(review): `model_dir` is defined elsewhere in the file — confirm it
# points at the directory containing bert_dssm_net.ckpt-00008972.*.
with sess1.as_default():
    with graph1.as_default():
        # import_meta_graph rebuilds the graph structure from the .meta file;
        # restore() then loads the variable values, so no
        # global_variables_initializer() is needed (running it on the
        # still-empty graph, as the original did, was a no-op).
        saver1 = tf.train.import_meta_graph(model_dir + '/bert_dssm_net.ckpt-00008972.meta')
        ckpt1 = tf.train.get_checkpoint_state(model_dir)
        saver1.restore(sess1, ckpt1.model_checkpoint_path)
        word_embedding = graph1.get_tensor_by_name('bert/embeddings/word_embeddings:0')
        # BUG FIX: the original `print(word_embedding.eval()` was missing the
        # closing ')'. Evaluate once and print the resulting array.
        word_embedding = word_embedding.eval()
        print(word_embedding)
# Restore the second (fine-tuned) checkpoint into graph2, read out its
# word-embedding matrix, and report whether it is element-wise identical
# to the embedding matrix extracted from the first model.
with sess2.as_default():
    with graph2.as_default():
        model_new_dir = "/home/ad-trigger/special_ad_evaluate/filter_by_wordvec/bert_finetune_model_v2"
        # Rebuild the graph from the .meta file; restore() loads the values,
        # so the original's global_variables_initializer() call was a no-op.
        saver2 = tf.train.import_meta_graph(model_new_dir + '/model.ckpt-4000.meta')
        # BUG FIX: the original restored the checkpoint TWICE (once via
        # latest_checkpoint, once via model_checkpoint_path). A single
        # restore from the checkpoint state suffices.
        ckpt2 = tf.train.get_checkpoint_state(model_new_dir)
        saver2.restore(sess2, ckpt2.model_checkpoint_path)
        word_embedding2 = graph2.get_tensor_by_name('bert/embeddings/word_embeddings:0')
        word_embedding2 = word_embedding2.eval()
        print(word_embedding2)
        # True only if fine-tuning left every embedding weight unchanged.
        print((word_embedding == word_embedding2).all())
# Release both sessions' resources now that the comparison is done.
for _sess in (sess1, sess2):
    _sess.close()