# NOTE: 本来还想开开心心地结束第五章，谁知5.4节遇上大坑了。。
# (Was hoping to wrap up Chapter 5 smoothly, but section 5.4 turned out to be a big pitfall.)
import tensorflow as tf
import tensorlayer as tl
from stringclean import *
import numpy as np
# Hyper-parameters; must match the pretrained word2vec checkpoint being loaded.
vocabulary_size = 50000  # number of words kept in the vocabulary
embedding_size = 128  # dimensionality of each word-embedding vector
model_file_name = "model_word2vec_50k_128"  # base name of the saved .npy checkpoint files
batch_size = None  # None -> the input placeholder accepts a variable batch size
_UNK = "_UNK"  # token used for out-of-vocabulary words
# Restore the trained word2vec vocabulary and embedding matrix, and rebuild the
# embedding lookup layer (TensorFlow 1.x graph-mode API, as used by this project).
sess = tf.InteractiveSession()

# Load the bookkeeping data saved during training: the raw id sequence,
# (word, frequency) counts, and the word<->id dictionaries.
all_var = tl.files.load_npy_to_any(name=model_file_name + '.npy')
data = all_var['data']
count = all_var['count']
dictionary = all_var['dictionary']
reverse_dictionary = all_var['reverse_dictionary']
print("~~~~~~~Loading npy successfully~~~~~~~~~~~~")

# Persist the vocabulary as a plain-text file for inspection/reuse.
tl.nlp.save_vocab(count, name='vocab_' + model_file_name + '.txt')
# Free memory: only the two dictionaries are needed from here on.
del all_var, data, count

# Load the trained network weights (the embedding matrix).
# NOTE(review): '53model.npz' is hard-coded instead of model_file_name + '.npz'
# (see the commented-out line) — confirm this is the intended checkpoint.
# load_params = tl.files.load_npz(name=model_file_name + '.npz')
load_params = tl.files.load_npz(name='53model.npz')
print("~~~~~~~Loading npz successfully~~~~~~~~~~~~")

# Placeholder for a 1-D batch of word ids; shape [None] allows any batch size.
x = tf.placeholder(tf.int32, shape=[batch_size])
emb_net = tl.layers.EmbeddingInputlayer(inputs=x, vocabulary_size=vocabulary_size, embedding_size=embedding_size, name='embedding_layer')
# Copy the loaded weights into the embedding layer's variables in this session.
tl.files.assign_params(sess, load_params, emb_net)
# Removed the trailing bare `tl.layer` expression: it had no effect and would
# raise AttributeError at runtime (the module is `tl.layers`, plural).