With TensorFlow 2.1 and Keras 2.3.1, set TF_KERAS=1 and save in the tf (SavedModel) format
keras.models.save_model(model, "model_save_path_1", save_format='tf')
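TF_KERAS=1 is read from the environment at import time by libraries that can switch Keras over to tf.keras (bert4keras is one example); a minimal sketch under that assumption, with a toy model standing in for the real one:

import os
os.environ["TF_KERAS"] = "1"  # must be set before the import that checks it

from tensorflow import keras

# Hypothetical stand-in for the real model
model = keras.Sequential([keras.layers.Dense(2, input_shape=(4,))])

# save_format='tf' writes a SavedModel directory (saved_model.pb + variables/)
keras.models.save_model(model, "model_save_path_1", save_format="tf")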
pb (SavedModel) to ONNX. Note that tf2onnx's --saved-model flag takes the SavedModel directory (the one containing saved_model.pb and variables/), not the .pb file itself; here it is the directory saved above:
python -m tf2onnx.convert --saved-model ./model_save_path_1 --opset 13 --output ./model.onnx
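To sanity-check the converted file, a minimal sketch that loads it with onnxruntime; the input name and shape are queried from the session rather than assumed, and the float32 dtype is an assumption (check inp.type, e.g. BERT-style models take int64 ids):

import numpy as np
import onnxruntime as ort

sess = ort.InferenceSession("./model.onnx")
inp = sess.get_inputs()[0]
# Replace dynamic dimensions (None or symbolic names) with 1 for a dummy batch
shape = [d if isinstance(d, int) else 1 for d in inp.shape]
dummy = np.zeros(shape, dtype=np.float32)  # dtype is an assumption; see inp.type
outputs = sess.run(None, {inp.name: dummy})
print(inp.name, inp.shape, [o.shape for o in outputs])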
Keras to ONNX (keras2onnx)
import onnx
import keras2onnx

# Convert the in-memory Keras model directly to an ONNX graph
onnx_model = keras2onnx.convert_keras(model, model.name)
temp_model_file = 'model.onnx'
onnx.save_model(onnx_model, temp_model_file)
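Whichever path produced model.onnx, a structural validation pass is cheap; check_model raises if the graph is malformed:

import onnx

m = onnx.load("model.onnx")
onnx.checker.check_model(m)  # raises onnx.checker.ValidationError on a malformed graph
print(onnx.helper.printable_graph(m.graph))  # human-readable graph summary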
Checkpoint .meta files to pb (TF1-style Estimator export)
import tensorflow as tf

# TF1-style feature spec describing the serialized tf.Example records
name_to_features = {
    "input_ids": tf.FixedLenFeature([FLAGS.max_seq_length], tf.int64),
    "input_mask": tf.FixedLenFeature([FLAGS.max_seq_length], tf.int64),
    "segment_ids": tf.FixedLenFeature([FLAGS.max_seq_length], tf.int64),
    "label_ids": tf.FixedLenFeature([], tf.int64),
    "is_real_example": tf.FixedLenFeature([], tf.int64),
}
def my_build_serving_input_receiver_fn(cols_description):
    def serving_input_receiver_fn():
        # Batch of serialized tf.Example protos fed in at serving time
        serialized_tf_example = tf.placeholder(dtype=tf.string, shape=None,
                                               name='input_example_tensor')
        # The key ('input_examples') must match the input key used when
        # building the prediction request
        receiver_tensors = {'input_examples': serialized_tf_example}
        features = tf.parse_example(serialized_tf_example, cols_description)
        return tf.estimator.export.ServingInputReceiver(features, receiver_tensors)
    return serving_input_receiver_fn
serving_input_receiver_fn = my_build_serving_input_receiver_fn(name_to_features)
pb_export_dir = 'output_pd'
# Writes a timestamped SavedModel directory under pb_export_dir
estimator.export_saved_model(pb_export_dir, serving_input_receiver_fn=serving_input_receiver_fn)
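Because the receiver key above ('input_examples') must match the key in the prediction request, here is a sketch of querying the export; tf.contrib.predictor exists only in TF 1.x, and the all-zero feature values are placeholders, not meaningful inputs:

import os
import tensorflow as tf

# export_saved_model writes into a timestamped subdirectory of pb_export_dir
export_dir = os.path.join(pb_export_dir, sorted(os.listdir(pb_export_dir))[-1])
predict_fn = tf.contrib.predictor.from_saved_model(export_dir)

# Build one serialized tf.train.Example matching name_to_features
seq_len = FLAGS.max_seq_length
example = tf.train.Example(features=tf.train.Features(feature={
    "input_ids": tf.train.Feature(int64_list=tf.train.Int64List(value=[0] * seq_len)),
    "input_mask": tf.train.Feature(int64_list=tf.train.Int64List(value=[0] * seq_len)),
    "segment_ids": tf.train.Feature(int64_list=tf.train.Int64List(value=[0] * seq_len)),
    "label_ids": tf.train.Feature(int64_list=tf.train.Int64List(value=[0])),
    "is_real_example": tf.train.Feature(int64_list=tf.train.Int64List(value=[1])),
}))

# The dict key must equal the receiver_tensors key above
result = predict_fn({"input_examples": [example.SerializeToString()]})
print(result)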