C++ OpenVINO DenseNet Deployment
Python version guide
CMake project example
The structure of this test project is as follows:
Project:
- Network: network inference code
  - DebugTools: prints the network structure
  - DenseGradeWrapper: the main inference wrapper
  - NetworkTools: assorted CV helper functions
- Script: the export and verification Python files from the earlier posts
- CMakeLists.txt: the CMake project file
- main.cpp: a simple entry point
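The CMakeLists.txt itself is not reproduced here; a minimal sketch for this layout could look like the following. The Qt5/OpenVINO/OpenCV package names and the exact source list are assumptions, adjust them to your local installs:

cmake_minimum_required(VERSION 3.16)
project(openvino-cpu-win-msvc2019 LANGUAGES CXX)

set(CMAKE_CXX_STANDARD 17)
set(CMAKE_AUTOMOC ON)            # required for QObject subclasses

find_package(Qt5 REQUIRED COMPONENTS Core)
find_package(OpenVINO REQUIRED)  # provides the openvino::runtime target
find_package(OpenCV REQUIRED)    # core + dnn are used in preprocessing

add_executable(${PROJECT_NAME}
    main.cpp
    Network/DebugTools.cpp
    Network/DenseGradeWrapper.cpp
    Network/NetworkTools.cpp)

target_link_libraries(${PROJECT_NAME}
    Qt5::Core
    openvino::runtime
    ${OpenCV_LIBS})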
DenseGradeWrapper
Below is the wrapper, in my usual code style. Before loading the model, make sure the path contains no Chinese (non-ASCII) characters: the OpenVINO build I downloaded did not report a clear read failure for such paths (possibly my log level was set wrong), which confused me for quite a while.
The inference below simply starts the request asynchronously and waits on it. The official docs recommend the callback form instead, which improves throughput; you can read the official samples and adapt the code yourself.
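For reference, a minimal sketch of that callback style, assuming the same compiledModel member and an already-built inputTensor (error handling reduced to a comment):

// Sketch only: callback-driven inference instead of start_async() + wait()
ov::InferRequest request = this->compiledModel.create_infer_request();
request.set_input_tensor(inputTensor);
request.set_callback([&request](std::exception_ptr ex) {
    if (ex) return;  // log/handle the inference error in real code
    const ov::Tensor &output = request.get_output_tensor();
    // consume the output here; the submitting thread stays free for other work
});
request.start_async();
// ... prepare the next input; the callback fires when inference completes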
#include "DenseGradeWrapper.h"
DenseGradeWrapper::DenseGradeWrapper(QString xmlPath, QObject *parent)
: QObject{parent}
{
qDebug() << ov::get_openvino_version().buildNumber << " " << ov::get_openvino_version().description;
qDebug() << "Start To Compile CPU Model";
ov::Core core;
this->xmlModel = core.read_model(xmlPath.toStdString());
printInputAndOutputsInfo(*this->xmlModel);
double tS, tE;
tS = (double) clock();
this->compiledModel = core.compile_model(this->xmlModel, "CPU");
tE = (double) clock();
qDebug() << "Compile Done, cost " << (tE - tS) / (CLOCKS_PER_SEC) << " s";
}
DenseInferResult DenseGradeWrapper::infer(QString imageSrc) {
    double tS, tE;

    // 1. image -> normalized NCHW blob
    tS = (double) clock();
    cv::Mat transDim = this->preprocess(imageSrc);
    tE = (double) clock();
    qDebug() << "Preprocess Done, cost " << (tE - tS) / CLOCKS_PER_SEC << " s";

    // 2. forward pass
    tS = (double) clock();
    QVector<float> buffer = this->_infer(transDim);
    tE = (double) clock();
    qDebug() << "Infer Done, cost " << (tE - tS) / CLOCKS_PER_SEC << " s";

    // 3. logits -> probabilities -> result struct
    tS = (double) clock();
    this->softmax(buffer);
    DenseInferResult res = this->postProcess(buffer);
    tE = (double) clock();
    qDebug() << "Postprocess Done, cost " << (tE - tS) / CLOCKS_PER_SEC << " s";
    return res;
}
QVector<float> DenseGradeWrapper::_infer(cv::Mat transDim) {
    ov::InferRequest inferRequest = this->compiledModel.create_infer_request();
    ov::Shape inputShape = {1, 3, 224, 224};
    // The tensor wraps transDim's buffer without copying, so transDim must
    // stay alive until inference finishes
    ov::Tensor inputTensor = ov::Tensor(ov::element::f32, inputShape, transDim.data);
    inferRequest.set_input_tensor(inputTensor);
    // async submit, then block until done (see the callback note above)
    inferRequest.start_async();
    inferRequest.wait();
    const ov::Tensor &outputTensor = inferRequest.get_output_tensor();
    const float *outputBuffer = outputTensor.data<const float>();
    return QVector<float>(outputBuffer, outputBuffer + outputTensor.get_size());
}
void DenseGradeWrapper::softmax(QVector<float> &buffer) {
    // Subtract the max logit before exp() so large logits cannot overflow;
    // the resulting probabilities are mathematically unchanged
    float maxLogit = *std::max_element(buffer.begin(), buffer.end());
    float denominator = 0.0f;
    for (int i = 0; i < buffer.size(); i++) {
        buffer[i] = std::exp(buffer[i] - maxLogit);
        denominator += buffer[i];
    }
    for (int i = 0; i < buffer.size(); i++) {
        buffer[i] /= denominator;
    }
}
DenseInferResult DenseGradeWrapper::postProcess(const QVector<float> &buffer) {
    // argmax over the class probabilities (avoids the float equality
    // comparison hidden in indexOf)
    int index = (int) std::distance(buffer.begin(),
                                    std::max_element(buffer.begin(), buffer.end()));
    DenseInferResult result;
    result.index = index;
    std::copy(buffer.begin(), buffer.end(), result.pie);  // pie must hold all 5 scores
    return result;
}
cv::Mat DenseGradeWrapper::preprocess(QString imageSrc) {
    cv::Mat cropMat = this->circleCrop(imageSrc);
    std::vector<cv::Mat> batch_mat;
    batch_mat.push_back(cropMat);
    // Scale to [0,1], swap BGR -> RGB and pack into a 1x3xHxW float blob.
    // cv::Size() keeps the crop's own size, so circleCrop must already
    // return a 224x224 image to match the input shape used in _infer
    return cv::dnn::blobFromImages(batch_mat, 1.0 / 255.0, cv::Size(),
                                   cv::Scalar(), true);
}

cv::Mat DenseGradeWrapper::circleCrop(QString imageSrc) {
    cv::Mat cropMask = NetworkTool::ToolCropImageFromMask(imageSrc);
    return NetworkTool::ToolCropWithGaussian(cropMask);
}
main
Here is a simple way to call the wrapper. With this, the C++ integration of OpenVINO is complete; a later post will cover integrating the TensorRT version.
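DenseGradeWrapper.h is not reproduced in this post; judging from the usage below, the result struct presumably looks something like this (an assumption, with the array size 5 matching the five classes printed in main):

// Assumed shape of the result struct (header not shown in the post)
struct DenseInferResult {
    int index;     // argmax class, i.e. the predicted grade
    float pie[5];  // softmax probability of each of the 5 classes
};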
// Intel official instructions:
// https://docs.openvino.ai/2024/openvino-workflow/running-inference/integrate-openvino-with-your-application.html
#include <iostream>
#include <QCoreApplication>
#include "Network/DenseGradeWrapper.h"

int main() {
    QString model_path = "../openvino-cpu-win-msvc2019/Script/export_dense121_cpu.xml";
    QString image_path = "../openvino-cpu-win-msvc2019/Script/1.jpg";
    DenseGradeWrapper denseWrapper(model_path);
    DenseInferResult res = denseWrapper.infer(image_path);
    qDebug() << "Level is: " << res.index;
    qDebug() << "Per-class probabilities: ";
    for (int i = 0; i < 5; i++) {
        qDebug() << "\t" << res.pie[i];
    }
    return 0;
}