import os
import pickle
import pprint
# Build the tokenizer object and fit it on the training texts.
# NOTE(review): `tf`, `max_words`, `train`, and `FLAGS` are assumed to be
# defined earlier in the file — confirm against the full module.
tokenizer = tf.keras.preprocessing.text.Tokenizer(num_words=max_words)
tokenizer.fit_on_texts(train)

# Persist the fitted tokenizer to a file.
tokenizer_file_path = os.path.join(FLAGS.pre_model_path, FLAGS.tokenizer_file)
with open(tokenizer_file_path, 'wb') as tokenizer_file:
    # protocol=-1 selects the highest pickle protocol available.
    pickle.dump(tokenizer, tokenizer_file, -1)
pprint.pprint(tokenizer)

# Reload the tokenizer from the file.
print("tokenizer reload:")
tokenizer_reload = None  # predefine so the name exists even if loading fails
try:
    with open(tokenizer_file_path, 'rb') as pkl_file:
        tokenizer_reload = pickle.load(pkl_file)
except EOFError:
    # Empty or truncated pickle file: fall through with tokenizer_reload=None
    # instead of raising a NameError at the pprint below.
    pass
pprint.pprint(tokenizer_reload)
# Storing objects with the pickle module.
# (Original article last updated 2023-07-17 15:51:35.)