The code is as follows:
"""Run inference on an ONNX ResNet model with onnxruntime.

Loads ./model/resnet.onnx, feeds a random float32 tensor of shape
(1, 3, 224, 224) through it, and prints the raw model outputs.
"""
import onnxruntime
import numpy as np

device_name = 'cpu'  # or 'cuda:0'

# Select execution providers for the requested device.
if device_name == 'cpu':
    providers = ['CPUExecutionProvider']
elif device_name == 'cuda:0':
    # CPU provider listed second as a fallback if CUDA is unavailable.
    providers = ['CUDAExecutionProvider', 'CPUExecutionProvider']
else:
    # Fail fast: otherwise `providers` would be undefined below (NameError).
    raise ValueError(f'Unsupported device: {device_name!r}')

# Create the inference session.
onnx_model = onnxruntime.InferenceSession('./model/resnet.onnx', providers=providers)

# Create the input. Shape/dtype must match the model's expected input
# (assumed NCHW 1x3x224x224 float32 for ResNet — confirm against the model).
# NOTE(review): the original referenced an undefined `input_img`; feed the
# random tensor instead. Replace `data` with a real preprocessed image as needed.
data = np.random.rand(1, 3, 224, 224).astype(np.float32)

# Inference: bind the tensor to the model's first input name.
onnx_input = {onnx_model.get_inputs()[0].name: data}
outputs = onnx_model.run(None, onnx_input)
print(outputs)