# ONNX dynamic batch-size export.
# Makes the batch dimension dynamic by rewriting the input/output tensor
# shapes in the ONNX graph structure.
import onnx
from onnx import shape_inference


def make_batch_dynamic(input_path="FaceDetector.onnx",
                       output_path="dynamic_face_detector.onnx",
                       dim_name="batch_size"):
    """Rewrite an ONNX model so its batch dimension is dynamic.

    Loads the model at *input_path*, renames the first (batch) dimension of
    the first graph input and of every graph output to the symbolic name
    *dim_name*, re-runs shape inference, validates the result, and saves it
    to *output_path*.

    Parameters
    ----------
    input_path : str
        Path of the fixed-batch ONNX model to load.
    output_path : str
        Path to write the dynamic-batch model to.
    dim_name : str
        Symbolic name used for the batch dimension (default "batch_size").

    Returns
    -------
    onnx.ModelProto
        The converted, shape-inferred model (also written to disk).

    Raises
    ------
    onnx.checker.ValidationError
        If the converted model fails ONNX validation.
    """
    model = onnx.load(input_path)

    # Only the FIRST graph input is touched: in older opsets, initializers
    # (weights) may also appear in graph.input, and renaming their leading
    # dimension would corrupt the model.
    model.graph.input[0].type.tensor_type.shape.dim[0].dim_param = dim_name

    # Every graph output gets the same symbolic batch dimension.
    for output_info in model.graph.output:
        output_info.type.tensor_type.shape.dim[0].dim_param = dim_name

    # infer_shapes does NOT overwrite existing value_info entries, so stale
    # fixed-batch shapes for intermediate tensors would otherwise survive and
    # can conflict with the new dynamic batch. Drop them and re-infer.
    del model.graph.value_info[:]
    model = shape_inference.infer_shapes(model)

    onnx.checker.check_model(model)  # validate before saving
    onnx.save(model, output_path)
    return model


if __name__ == "__main__":
    # Original script behavior: convert FaceDetector.onnx in place.
    make_batch_dynamic(r"FaceDetector.onnx", "dynamic_face_detector.onnx")