# Compare inference time between OpenVINO and ONNX Runtime using the Python API.
# (Parts of this code were learned from / adapted from other authors' published examples.)
#openvino
from openvino.inference_engine import IECore
import numpy as np
import cv2
import time
# --- OpenVINO setup: load the network and inspect its I/O -------------------
ie = IECore()

# Path to the model to benchmark; swap in the IR (.xml) variant if desired.
model = "shufflenet-v2-10.onnx"
# model = "shufflenet-v2-10/shufflenet-v2-10.xml"
net = ie.read_network(model=model)

# Names of the first (and only expected) input and output tensors.
input_blob = next(iter(net.input_info))
out_blob = next(iter(net.outputs))

# Run one image at a time.
net.batch_size = 1

# Input layout: batch, channels, height, width.
n, c, h, w = net.input_info[input_blob].input_data.shape
p