CMake multi-file, multi-directory builds (1)

1. A complete project example

The project directory structure is as follows:

manyFolderCompile3: the project root directory.

bin: the directory where the executable is generated; the test image must be placed here. Unless the image filename read in "main.cpp" is changed, the image must be named "123.jpeg".

build: the directory we enter to run the build commands.

include: the directory holding the header files.

lib: the directory where the library is generated.

src: the directory holding the source files.

The contents of the relevant files are as follows:

CMakeLists.txt

cmake_minimum_required (VERSION 2.8)

set(CMAKE_POSITION_INDEPENDENT_CODE ON)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Wextra")
set(CUDA_TOOLKIT_ROOT_DIR /usr/local/cuda)

project (testCmake)
add_definitions(-std=c++11)
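# Note: a more idiomatic way to request C++11 in newer CMake versions is:
#   set(CMAKE_CXX_STANDARD 11)
#   set(CMAKE_CXX_STANDARD_REQUIRED ON)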

find_package(OpenCV REQUIRED)

set(LIBRARY_OUTPUT_PATH ${PROJECT_SOURCE_DIR}/lib)
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${PROJECT_SOURCE_DIR}/bin)

aux_source_directory(. ALL_SRCS)			# collect all source files in the current directory
add_subdirectory(src)						# descend into the src subdirectory
add_executable(testCmake ${ALL_SRCS})	# declare the executable target; note: use ${ALL_SRCS} here!
target_link_libraries(testCmake power)	# link the power library, which is defined in src/CMakeLists.txt
target_link_libraries(testCmake ${OpenCV_LIBRARIES})
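
Note that main.cpp includes the header as "include/power.h", a path resolved relative to the project root where main.cpp lives. An alternative sketch (not part of the original project) is to add the header directory at the top level, so a plain #include "power.h" would also resolve:

include_directories(${PROJECT_SOURCE_DIR}/include)	# hypothetical: applies to every target declared below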

power.h

#ifndef POWER_H
#define POWER_H
double power(double base, int exponent);

#endif

main.cpp

#include <iostream>
#include <stdio.h>
#include <stdlib.h>
#include<opencv2/opencv.hpp>

#include "include/power.h"			//特别注意自己定义的函数头文件需要用引号括起来

using namespace std;
using namespace cv;

int main(int argc, char *argv[])
{
    if (argc < 3){
        printf("Usage: %s base exponent \n", argv[0]);
        return 1;
    }
    double base = atof(argv[1]);
    int exponent = atoi(argv[2]);
    double result = power(base, exponent);
    cout << base << " ^ " << exponent << " is " << result << endl;
    cout << "yes" << endl;

    Mat srcImage = imread("123.jpeg");
    if (srcImage.empty()) {				// guard against a missing or misnamed image file
        cout << "failed to load 123.jpeg" << endl;
        return 1;
    }
    imshow("Origin", srcImage);
    waitKey(0);

    return 0;
}

src/CMakeLists.txt

include_directories(
    ${PROJECT_SOURCE_DIR}/include
)
aux_source_directory(. LIB_SRCS)			# collect all source files in the current directory
add_library (power ${LIB_SRCS})				# build the files in this directory into a library
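
add_library with no keyword builds a static library by default (unless BUILD_SHARED_LIBS is ON). A sketch of making the choice explicit; either line could replace the one above:

add_library(power STATIC ${LIB_SRCS})		# static library, e.g. lib/libpower.a on Linux
add_library(power SHARED ${LIB_SRCS})		# shared library, e.g. lib/libpower.so on Linux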

power.cpp

#include "power.h"			//特别注意自己定义的函数头文件需要用引号括起来

double power(double base, int exponent)
{
    double result = base;				// must be double; an int here truncates non-integer bases
    int i;
    
    if (exponent == 0) {
        return 1;
    }
    
    for (i = 1; i < exponent; ++i) {	// note: negative exponents are not handled here
        result = result * base;
    }
    return result;
}

After creating the project, enter the build directory and run the following commands:

cmake ..
make
cd ..
cd bin
./testCmake 2 2

Running ./testCmake 2 2 prints the following and then opens an OpenCV window named "Origin" showing the image (the original post included a screenshot here):

2 ^ 2 is 4
yes

2. Studying PaddleX's CMakeLists.txt

        The original files come from the PaddleX repository (linked in the original post).

        In ordinary commands, variables are referenced with ${}. In IF and similar statements, the variable name is used directly, without ${} dereferencing.
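
        A minimal sketch illustrating this rule (MY_FLAG is a hypothetical variable):

set(MY_FLAG ON)
if(MY_FLAG)                                   # IF takes the variable name directly
    message(STATUS "MY_FLAG is ${MY_FLAG}")   # elsewhere, ${} is needed to read the value
endif()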

The project layout is as follows (only the files involved are listed):

--cpp: the directory from which sh scripts/build.sh is run

        --scripts:

                --build.sh

        --CMakeLists.txt

        --demo:

                --CMakeLists.txt

        --build:

Contents of scripts/build.sh (this file differs slightly from the PaddleX original; a few small changes were made):

# Whether to use the GPU (i.e., whether to use CUDA)
WITH_GPU=ON
# Use MKL or OpenBLAS
WITH_MKL=ON
# Whether to integrate TensorRT (only effective when WITH_GPU=ON)
WITH_PADDLE_TENSORRT=OFF
# Path to TensorRT; to integrate TensorRT, change this to the path where TensorRT is actually installed
TENSORRT_DIR=$(pwd)/TensorRT/
# Path to the Paddle inference library: I have already built Paddle's release/2.4 branch at the path below
PADDLE_DIR=/paddle/paddlepaddle/Paddle/build/paddle_inference_install_dir
# Whether to link the Paddle inference library statically
# When using TensorRT, the Paddle inference library is usually a shared library
WITH_STATIC_LIB=OFF
# CUDA lib path
CUDA_LIB=/usr/local/cuda/lib64
# cuDNN lib path
CUDNN_LIB=/usr/lib
# Whether to enable encryption
WITH_ENCRYPTION=OFF
# OpenSSL path
OPENSSL_DIR=$(pwd)/deps/openssl-1.1.0k

{
    bash $(pwd)/scripts/bootstrap.sh # download the prebuilt encryption tool and the OpenCV dependency
} || {
    echo "Failed to execute scripts/bootstrap.sh"
    exit 1
}

# No changes are needed below this line
rm -rf build
mkdir -p build
cd build
cmake .. \
    -DWITH_GPU=${WITH_GPU} \
    -DWITH_MKL=${WITH_MKL} \
    -DWITH_PADDLE_TENSORRT=${WITH_PADDLE_TENSORRT} \
    -DTENSORRT_DIR=${TENSORRT_DIR} \
    -DPADDLE_DIR=${PADDLE_DIR} \
    -DWITH_STATIC_LIB=${WITH_STATIC_LIB} \
    -DCUDA_LIB=${CUDA_LIB} \
    -DCUDNN_LIB=${CUDNN_LIB} \
    -DWITH_ENCRYPTION=${WITH_ENCRYPTION} \
    -DOPENSSL_DIR=${OPENSSL_DIR}
make -j16

./CMakeLists.txt

cmake_minimum_required(VERSION 3.0)  # minimum required CMake version
project(PaddleDeploy CXX C)  # project name and the enabled languages

# option(<variable> "<help_text>" [value])
# variable: the option name; help_text: a description of the option; value: the default state,
# normally ON or OFF (any value other than ON is treated as OFF)
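# For example, each option can be overridden when configuring (a hypothetical invocation):
#   cmake .. -DWITH_GPU=ON -DWITH_MKL=OFF
# which is exactly what scripts/build.sh does above.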

if (WIN32)  # WIN32 is predefined by CMake when targeting Windows
  option(WITH_STATIC_LIB "Compile demo with static/shared library, default use static."   ON)
else()
  option(WITH_STATIC_LIB "Compile demo with static/shared library, default use static."   OFF)
endif()
# Paddle
option(WITH_MKL        "Compile demo with MKL/OpenBlas support, default use MKL."        ON)
option(WITH_GPU        "Compile demo with GPU/CPU, default use CPU."                    OFF)
option(WITH_PADDLE_TENSORRT "Compile demo with TensorRT."   OFF)
# other engines
option(WITH_OPENVINO "Compile demo with OpenVINO."   OFF)
option(WITH_ONNX_TENSORRT "Compile demo with ONNX-TensorRT."   OFF)

# set() has three uses: normal variables, cache entries, and environment variables. Here it creates
# cache entries. A cache entry's value is not overwritten unless the user sets it or FORCE is given.
# A cache entry is essentially a variable that persists across directory levels, similar to a global variable.
# set(<variable> <value>... CACHE <type> <docstring> [FORCE])
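# For example, a value supplied on the command line takes precedence over the empty
# default below (hypothetical invocation):
#   cmake .. -DPADDLE_DIR=/path/to/paddle_inference_install_dir
# while FORCE would overwrite an already-cached value from within CMake:
#   SET(PADDLE_DIR "" CACHE PATH "Location of libraries" FORCE)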
SET(DEPS "" CACHE PATH "Location of libraries")
# Paddle
SET(TENSORRT_DIR "" CACHE PATH "Location of libraries")
SET(PADDLE_DIR "" CACHE PATH "Location of libraries")
SET(CUDA_LIB "" CACHE PATH "Location of libraries")
#OpenVINO
SET(GFLAGS_DIR "" CACHE PATH "Location of libraries")
SET(OPENVINO_DIR "" CACHE PATH "Location of libraries")
SET(NGRAPH_LIB "" CACHE PATH "Location of libraries")

SET(PROJECT_ROOT_DIR  "." CACHE PATH  "root directory of project.")

if (NOT WIN32)
    set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib)
    set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib)
    set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/demo)
else()
    set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/paddle_deploy)
    set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/paddle_deploy)
    set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/paddle_deploy)
    add_definitions(-DPADDLEX_DEPLOY)  # define a preprocessor macro for the compiler
endif()

#project
include_directories("${PROJECT_SOURCE_DIR}")  # 添加头文件

# aux_source_directory(<dir> <variable>)
# collects the filenames of all source files under the given path into the named variable;
# header (.h) files are not included
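# Note: aux_source_directory scans the directory at configure time only, so source files
# added later are not picked up until cmake is re-run; this is why the CMake documentation
# recommends listing sources explicitly.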

# common
aux_source_directory(${PROJECT_SOURCE_DIR}/model_deploy/common/src SRC)
aux_source_directory(${PROJECT_SOURCE_DIR}/model_deploy/utils/src SRC)

# det seg clas pdx src
aux_source_directory(${PROJECT_SOURCE_DIR}/model_deploy/ppdet/src DETECTOR_SRC)
aux_source_directory(${PROJECT_SOURCE_DIR}/model_deploy/ppseg/src DETECTOR_SRC)
aux_source_directory(${PROJECT_SOURCE_DIR}/model_deploy/ppclas/src DETECTOR_SRC)
aux_source_directory(${PROJECT_SOURCE_DIR}/model_deploy/paddlex/src DETECTOR_SRC)

#yaml-cpp
if(WIN32)
  SET(YAML_BUILD_SHARED_LIBS OFF CACHE BOOL "yaml build shared library.")
else()
  SET(YAML_BUILD_SHARED_LIBS ON CACHE BOOL "yaml build shared library.")
endif(WIN32)
include(${PROJECT_SOURCE_DIR}/cmake/yaml-cpp.cmake)
include_directories("${CMAKE_CURRENT_BINARY_DIR}/ext/yaml-cpp/src/ext-yaml-cpp/include")
link_directories("${CMAKE_CURRENT_BINARY_DIR}/ext/yaml-cpp/lib")

#OPENCV
find_package(OpenCV REQUIRED)
message(STATUS "OpenCV library status:")
message(STATUS "    version: ${OpenCV_VERSION}")
message(STATUS "    libraries: ${OpenCV_LIBS}")
message(STATUS "    include path: ${OpenCV_INCLUDE_DIRS}")

set(DEPS ${DEPS} ${OpenCV_LIBS})
include_directories(${OpenCV_INCLUDE_DIRS})

# MSVC runtime: swap /MD (dynamic CRT) for /MT (static CRT) in every compiler-flag variable
macro(safe_set_static_flag)
    foreach(flag_var
        CMAKE_CXX_FLAGS CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE
        CMAKE_CXX_FLAGS_MINSIZEREL CMAKE_CXX_FLAGS_RELWITHDEBINFO)
      if(${flag_var} MATCHES "/MD")
        string(REGEX REPLACE "/MD" "/MT" ${flag_var} "${${flag_var}}")
      endif(${flag_var} MATCHES "/MD")
    endforeach(flag_var)
endmacro()

if (WIN32)
    add_definitions("/DGOOGLE_GLOG_DLL_DECL=")
    find_package(OpenMP REQUIRED)
    if (OPENMP_FOUND)
        message("OPENMP FOUND")
        set(CMAKE_C_FLAGS_DEBUG   "${CMAKE_C_FLAGS_DEBUG} ${OpenMP_C_FLAGS}")
        set(CMAKE_C_FLAGS_RELEASE  "${CMAKE_C_FLAGS_RELEASE} ${OpenMP_C_FLAGS}")
        set(CMAKE_CXX_FLAGS_DEBUG  "${CMAKE_CXX_FLAGS_DEBUG} ${OpenMP_CXX_FLAGS}")
        set(CMAKE_CXX_FLAGS_RELEASE   "${CMAKE_CXX_FLAGS_RELEASE} ${OpenMP_CXX_FLAGS}")
    endif()
    set(CMAKE_C_FLAGS_DEBUG   "${CMAKE_C_FLAGS_DEBUG} /bigobj /MTd")
    set(CMAKE_C_FLAGS_RELEASE  "${CMAKE_C_FLAGS_RELEASE} /bigobj /MT")
    set(CMAKE_CXX_FLAGS_DEBUG  "${CMAKE_CXX_FLAGS_DEBUG} /bigobj /MTd")
    set(CMAKE_CXX_FLAGS_RELEASE   "${CMAKE_CXX_FLAGS_RELEASE} /bigobj /MT")
    if (WITH_STATIC_LIB)
        safe_set_static_flag()
        add_definitions(-DSTATIC_LIB)
    endif()
else()
    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -g -O3 -fopenmp -std=c++11")
    set(CMAKE_STATIC_LIBRARY_PREFIX "")
    set(EXTERNAL_LIB "-ldl -lrt -lgomp -lz -lm -lpthread")
    set(DEPS ${DEPS} ${EXTERNAL_LIB})
endif()

# encryption
set(ENCRYPTION_SRC "")
if (WITH_ENCRYPTION)
  add_definitions(-DPADDLEX_DEPLOY_ENCRYPTION)
  set(CMAKE_C_FLAGS "-fPIC ${CMAKE_C_FLAGS}")
  set(CMAKE_CXX_FLAGS "-fPIC ${CMAKE_CXX_FLAGS}")

  include_directories("${OPENSSL_DIR}/install-${CMAKE_SYSTEM_PROCESSOR}/include")
  link_directories("${OPENSSL_DIR}/install-${CMAKE_SYSTEM_PROCESSOR}/lib")
  if (WIN32)
    set(DEPS ${DEPS} libssl_static${CMAKE_STATIC_LIBRARY_SUFFIX} libcrypto_static${CMAKE_STATIC_LIBRARY_SUFFIX})
  else ()
    set(DEPS ${DEPS} libssl${CMAKE_STATIC_LIBRARY_SUFFIX} libcrypto${CMAKE_STATIC_LIBRARY_SUFFIX})
   endif()
  aux_source_directory(${PROJECT_ROOT_DIR}/encryption/src ENCRYPTION_SRC)
  aux_source_directory(${PROJECT_ROOT_DIR}/encryption/util/src ENCRYPTION_SRC)
  aux_source_directory(${PROJECT_ROOT_DIR}/encryption/util/src/crypto ENCRYPTION_SRC)
endif()

# select engine
if(WITH_OPENVINO)
  add_subdirectory(demo/onnx_openvino)
else ()
  add_subdirectory(demo)
endif()

./demo/CMakeLists.txt

#paddle inference
if (NOT DEFINED PADDLE_DIR OR ${PADDLE_DIR} STREQUAL "")
    message(FATAL_ERROR "please set PADDLE_DIR with -DPADDLE_DIR=/path/paddle_influence_dir")
endif()

#paddle inference third party
include_directories("${PADDLE_DIR}")
include_directories("${PADDLE_DIR}/third_party/install/protobuf/include")
include_directories("${PADDLE_DIR}/third_party/install/glog/include")
include_directories("${PADDLE_DIR}/third_party/install/gflags/include")
include_directories("${PADDLE_DIR}/third_party/install/xxhash/include")
include_directories("${PADDLE_DIR}/third_party/install/cryptopp/include")

link_directories("${PADDLE_DIR}/paddle/lib/")
link_directories("${PADDLE_DIR}/third_party/install/protobuf/lib")
link_directories("${PADDLE_DIR}/third_party/install/glog/lib")
link_directories("${PADDLE_DIR}/third_party/install/gflags/lib")
link_directories("${PADDLE_DIR}/third_party/install/xxhash/lib")
link_directories("${PADDLE_DIR}/third_party/install/cryptopp/lib")

if (WIN32)
  set(DEPS ${DEPS} ${PADDLE_DIR}/paddle/lib/paddle_inference.lib)
  set(DEPS ${DEPS} glog gflags_static libprotobuf xxhash cryptopp-static libyaml-cppmt shlwapi)
else()
  if (WITH_STATIC_LIB)
    set(DEPS ${DEPS} ${PADDLE_DIR}/paddle/lib/libpaddle_inference${CMAKE_STATIC_LIBRARY_SUFFIX})
  else()
    set(DEPS ${DEPS} ${PADDLE_DIR}/paddle/lib/libpaddle_inference${CMAKE_SHARED_LIBRARY_SUFFIX})
  endif()
  set(DEPS ${DEPS} glog gflags protobuf xxhash cryptopp yaml-cpp)
endif(WIN32)
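# (DEPS keeps accumulating link dependencies this way throughout the file; it is finally
# passed to target_link_libraries for each executable defined at the bottom.)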

#MKL
if(WITH_MKL)
  ADD_DEFINITIONS(-DUSE_MKL)
  set(MKLML_PATH "${PADDLE_DIR}/third_party/install/mklml")
  include_directories("${MKLML_PATH}/include")
  if (WIN32)
    set(MATH_LIB ${MKLML_PATH}/lib/mklml.lib ${MKLML_PATH}/lib/libiomp5md.lib)
  else ()
    set(MATH_LIB ${MKLML_PATH}/lib/libmklml_intel${CMAKE_SHARED_LIBRARY_SUFFIX} ${MKLML_PATH}/lib/libiomp5${CMAKE_SHARED_LIBRARY_SUFFIX})
    execute_process(COMMAND cp -r ${MKLML_PATH}/lib/libmklml_intel${CMAKE_SHARED_LIBRARY_SUFFIX} /usr/lib)
  endif ()
  set(MKLDNN_PATH "${PADDLE_DIR}/third_party/install/mkldnn")
  if(EXISTS ${MKLDNN_PATH})
    include_directories("${MKLDNN_PATH}/include")
    if (WIN32)
      set(MKLDNN_LIB ${MKLDNN_PATH}/lib/mkldnn.lib)
    else ()
      set(MKLDNN_LIB ${MKLDNN_PATH}/lib/libmkldnn.so.0)
    endif ()
  endif()
else()
  set(MATH_LIB ${PADDLE_DIR}/third_party/install/openblas/lib/libopenblas${CMAKE_STATIC_LIBRARY_SUFFIX})
endif()

set(DEPS ${DEPS} ${MATH_LIB} ${MKLDNN_LIB})

#set GPU
if (WITH_PADDLE_TENSORRT AND WITH_GPU)
  include_directories("${TENSORRT_DIR}/include")
  link_directories("${TENSORRT_DIR}/lib")

  file(READ ${TENSORRT_DIR}/include/NvInfer.h TENSORRT_VERSION_FILE_CONTENTS)
  string(REGEX MATCH "define NV_TENSORRT_MAJOR +([0-9]+)" TENSORRT_MAJOR_VERSION
    "${TENSORRT_VERSION_FILE_CONTENTS}")
  if("${TENSORRT_MAJOR_VERSION}" STREQUAL "")
    file(READ ${TENSORRT_DIR}/include/NvInferVersion.h TENSORRT_VERSION_FILE_CONTENTS)
    string(REGEX MATCH "define NV_TENSORRT_MAJOR +([0-9]+)" TENSORRT_MAJOR_VERSION
      "${TENSORRT_VERSION_FILE_CONTENTS}")
  endif()
  if("${TENSORRT_MAJOR_VERSION}" STREQUAL "")
    message(SEND_ERROR "Failed to detect TensorRT version.")
  endif()
  string(REGEX REPLACE "define NV_TENSORRT_MAJOR +([0-9]+)" "\\1"
    TENSORRT_MAJOR_VERSION "${TENSORRT_MAJOR_VERSION}")
  message(STATUS "Current TensorRT header is ${TENSORRT_INCLUDE_DIR}/NvInfer.h. "
    "Current TensorRT version is v${TENSORRT_MAJOR_VERSION}. ")
endif()

if(WITH_GPU)
  if (NOT DEFINED CUDA_LIB OR ${CUDA_LIB} STREQUAL "")
    message(FATAL_ERROR "please set CUDA_LIB with -DCUDA_LIB=/path/cuda/lib64")
  endif()


  if(NOT WIN32)
    if (NOT DEFINED CUDNN_LIB)
      message(FATAL_ERROR "please set CUDNN_LIB with -DCUDNN_LIB=/path/cudnn/")
    endif()

    set(DEPS ${DEPS} ${CUDA_LIB}/libcudart${CMAKE_SHARED_LIBRARY_SUFFIX})
    set(DEPS ${DEPS} ${CUDNN_LIB}/libcudnn${CMAKE_SHARED_LIBRARY_SUFFIX})

    if (WITH_PADDLE_TENSORRT)
      set(DEPS ${DEPS} ${TENSORRT_DIR}/lib/libnvinfer${CMAKE_SHARED_LIBRARY_SUFFIX})
      set(DEPS ${DEPS} ${TENSORRT_DIR}/lib/libnvinfer_plugin${CMAKE_SHARED_LIBRARY_SUFFIX})
    endif()

  else()
    set(DEPS ${DEPS} ${CUDA_LIB}/cudart${CMAKE_STATIC_LIBRARY_SUFFIX} )
    set(DEPS ${DEPS} ${CUDA_LIB}/cublas${CMAKE_STATIC_LIBRARY_SUFFIX} )
    set(DEPS ${DEPS} ${CUDA_LIB}/cudnn${CMAKE_STATIC_LIBRARY_SUFFIX})

    if (WITH_PADDLE_TENSORRT)
      set(DEPS ${DEPS} ${TENSORRT_DIR}/lib/nvinfer${CMAKE_STATIC_LIBRARY_SUFFIX})
      set(DEPS ${DEPS} ${TENSORRT_DIR}/lib/nvinfer_plugin${CMAKE_STATIC_LIBRARY_SUFFIX})
      if(${TENSORRT_MAJOR_VERSION} EQUAL 7)
        set(DEPS ${DEPS} ${TENSORRT_DIR}/lib/myelin64_1${CMAKE_STATIC_LIBRARY_SUFFIX})
      endif()
    endif()
  endif()
endif()

message("-----DEPS = ${DEPS}")

# engine src
set(ENGINE_SRC ${PROJECT_SOURCE_DIR}/model_deploy/engine/src/ppinference_engine.cpp)


add_executable(model_infer model_infer.cpp ${SRC} ${ENGINE_SRC} ${DETECTOR_SRC} ${ENCRYPTION_SRC})
ADD_DEPENDENCIES(model_infer ext-yaml-cpp)
target_link_libraries(model_infer ${DEPS})

add_executable(batch_infer batch_infer.cpp ${SRC} ${ENGINE_SRC} ${DETECTOR_SRC} ${ENCRYPTION_SRC})
ADD_DEPENDENCIES(batch_infer ext-yaml-cpp)
target_link_libraries(batch_infer ${DEPS})

add_executable(multi_gpu_model_infer multi_gpu_model_infer.cpp ${SRC} ${ENGINE_SRC} ${DETECTOR_SRC} ${ENCRYPTION_SRC})
ADD_DEPENDENCIES(multi_gpu_model_infer ext-yaml-cpp)
target_link_libraries(multi_gpu_model_infer ${DEPS})

if (WITH_PADDLE_TENSORRT)
  add_executable(tensorrt_infer tensorrt_infer.cpp ${SRC} ${ENGINE_SRC} ${DETECTOR_SRC} ${ENCRYPTION_SRC})
  ADD_DEPENDENCIES(tensorrt_infer ext-yaml-cpp)
  target_link_libraries(tensorrt_infer ${DEPS})
endif()

if(WIN32)
  add_custom_command(TARGET model_infer POST_BUILD
    COMMAND ${CMAKE_COMMAND} -E copy ${PADDLE_DIR}/third_party/install/mklml/lib/mklml.dll ${CMAKE_BINARY_DIR}/paddle_deploy
    COMMAND ${CMAKE_COMMAND} -E copy ${PADDLE_DIR}/third_party/install/mklml/lib/libiomp5md.dll ${CMAKE_BINARY_DIR}/paddle_deploy
    COMMAND ${CMAKE_COMMAND} -E copy ${PADDLE_DIR}/third_party/install/mkldnn/lib/mkldnn.dll  ${CMAKE_BINARY_DIR}/paddle_deploy
    COMMAND ${CMAKE_COMMAND} -E copy ${PADDLE_DIR}/paddle/lib/paddle_inference.dll ${CMAKE_BINARY_DIR}/paddle_deploy
  )
  if (WITH_PADDLE_TENSORRT)
    add_custom_command(TARGET model_infer POST_BUILD
      COMMAND ${CMAKE_COMMAND} -E copy ${TENSORRT_DIR}/lib/nvinfer.dll ${CMAKE_BINARY_DIR}/paddle_deploy
      COMMAND ${CMAKE_COMMAND} -E copy ${TENSORRT_DIR}/lib/nvinfer_plugin.dll ${CMAKE_BINARY_DIR}/paddle_deploy
    )
    if(${TENSORRT_MAJOR_VERSION} EQUAL 7)
      add_custom_command(TARGET model_infer POST_BUILD
        COMMAND ${CMAKE_COMMAND} -E copy ${TENSORRT_DIR}/lib/myelin64_1.dll ${CMAKE_BINARY_DIR}/paddle_deploy
      )
    endif()
  endif()
endif()
