目录
find_package 将会根据开发人员提前写好的库或者进程的依赖,自己去加载依赖项,可以大大减少程序员工作量。但是需要自己提供相应的 FindXXX.cmake 或者 XXXXConfig.cmake XXXX-config.cmake文件。
原始版本的CMakeLists.txt
TENSORRT相关依赖库,是通过set的方式,给TENSORRT_DIR赋值,拼接出include目录及lib目录,set的相关路径不含任何.cmake文件。即是依靠 ${TENSORRT_DIR}/include 及 ${TENSORRT_DIR}/lib 给定的TENSORRT依赖路径。再通过target_link_libraries(cucodes nvinfer nvonnxparser)方式人为指定需要链接的库。下面将通过两种方式自制.cmake文件,以简化库的引用过程。
cmake_minimum_required(VERSION 3.0)
project(pro)
add_definitions(-std=c++11)
# Fix: option() takes a help string before the value --
# option(<var> "<help>" [value]). The original
# `option(CUDA_USE_STATIC_CUDA_RUNTIME OFF)` used "OFF" as the help text,
# and the value only defaulted to OFF by accident.
option(CUDA_USE_STATIC_CUDA_RUNTIME "use the static CUDA runtime" OFF)
set(CMAKE_CXX_STANDARD 11)
set(CMAKE_BUILD_TYPE Debug)
set(EXECUTABLE_OUTPUT_PATH ${PROJECT_SOURCE_DIR}/workspace)
set(CUDA_GEN_CODE "-gencode=arch=compute_61,code=sm_61")
# Hard-coded local install prefixes -- adjust for your machine.
set(OpenCV_DIR "/usr/local/opencv/lib64/cmake/opencv4")
set(CUDA_DIR "/usr/local/cuda-11.6")
set(CUDNN_DIR "/usr/local/cuda-11.6")
set(TENSORRT_DIR "/usr/local/tensorRt8Target/TensorRT-8.5.1.7")
find_package(CUDA REQUIRED)
# Fix: REQUIRED added -- ${OpenCV_LIBS} is linked unconditionally below, so a
# missing OpenCV should fail at configure time, not as an obscure link error.
find_package(OpenCV REQUIRED)
include_directories(
${PROJECT_SOURCE_DIR}/src
${OpenCV_INCLUDE_DIRS}
${CUDA_DIR}/include
${TENSORRT_DIR}/include
${CUDNN_DIR}/include
)
link_directories(
${TENSORRT_DIR}/lib
${CUDA_DIR}/lib64
${CUDNN_DIR}/lib
)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -Wall -O0 -Wfatal-errors -pthread -w -g")
set(CUDA_NVCC_FLAGS "${CUDA_NVCC_FLAGS} -std=c++11 -O0 -Xcompiler -fPIC -g -w ${CUDA_GEN_CODE}")
# Recursively collect the source files.
file(GLOB_RECURSE cpp_srcs ${PROJECT_SOURCE_DIR}/src/*.cpp)
file(GLOB_RECURSE c_srcs ${PROJECT_SOURCE_DIR}/src/*.c)
file(GLOB_RECURSE cuda_srcs ${PROJECT_SOURCE_DIR}/src/*.cu)
cuda_add_library(cucodes SHARED ${cuda_srcs})
add_executable(pro ${cpp_srcs} ${c_srcs})
# NOTE: the plain (keyword-less) target_link_libraries signature is kept on
# `cucodes` because cuda_add_library already links it with the plain
# signature; mixing plain and keyword forms on one target is a CMake error.
target_link_libraries(cucodes nvinfer nvonnxparser)
target_link_libraries(cucodes cuda cublas cudart cudnn)
target_link_libraries(pro ${OpenCV_LIBS})
target_link_libraries(pro cucodes)
# `make run` builds pro and executes it from the workspace directory.
add_custom_target(
run
DEPENDS pro
WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}/workspace
COMMAND ./pro
)
find_package用于引入外部库,通过查找依赖库的文件路径,有两种模式:
以Module模式进行改造,自制FindXXX.cmake
在Module模式中,cmake需要找到一个叫做FindXXX.cmake的文件。这个文件负责找到库所在的路径,为我们的项目引入头文件路径和库文件路径。cmake搜索这个文件的路径有两个,一个是cmake安装目录下的/root/cmake-3.6.2/Modules目录,另一个是我们指定的CMAKE_MODULE_PATH所在的目录。
cmake_minimum_required(VERSION 3.0)
project(pro)
add_definitions(-std=c++11)
# Prepend the directory holding FindTensorRT.cmake so that
# find_package(TensorRT) below can locate it (Module mode).
set(CMAKE_MODULE_PATH "/usr/local/tensorRt8Target/TensorRT-8.5.1.7/cmake;${CMAKE_MODULE_PATH}")
# Fix: option() takes a help string before the value --
# option(<var> "<help>" [value]). The original passed "OFF" as the help text.
option(CUDA_USE_STATIC_CUDA_RUNTIME "use the static CUDA runtime" OFF)
set(CMAKE_CXX_STANDARD 11)
set(CMAKE_BUILD_TYPE Debug)
set(EXECUTABLE_OUTPUT_PATH ${PROJECT_SOURCE_DIR}/workspace)
set(CUDA_GEN_CODE "-gencode=arch=compute_61,code=sm_61")
# Hard-coded local install prefixes -- adjust for your machine.
set(OpenCV_DIR "/usr/local/opencv/lib64/cmake/opencv4")
set(CUDA_DIR "/usr/local/cuda-11.6")
set(CUDNN_DIR "/usr/local/cuda-11.6")
find_package(CUDA REQUIRED)
# Fix: REQUIRED added -- ${OpenCV_LIBS} is linked unconditionally below.
find_package(OpenCV REQUIRED)
# Fix: REQUIRED added -- the TensorRT variables are consumed unconditionally
# below, so configuration must fail fast when the find module (or the
# TensorRT installation itself) is missing.
find_package(TensorRT REQUIRED)
include_directories(
${PROJECT_SOURCE_DIR}/src
${OpenCV_INCLUDE_DIRS}
${CUDA_DIR}/include
# Variable exported by FindTensorRT.cmake
${TensorRT_INCLUDE_DIR}
${CUDNN_DIR}/include
)
MESSAGE(STATUS "TensorRT_INCLUDE_DIR: ${TensorRT_INCLUDE_DIR}")
MESSAGE(STATUS "TensorRT_LIBRARY: ${TensorRT_LIBRARY}")
link_directories(
# Variable exported by FindTensorRT.cmake
${TensorRT_LIBRARY_DIR}
${CUDA_DIR}/lib64
${CUDNN_DIR}/lib
)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -Wall -O0 -Wfatal-errors -pthread -w -g")
set(CUDA_NVCC_FLAGS "${CUDA_NVCC_FLAGS} -std=c++11 -O0 -Xcompiler -fPIC -g -w ${CUDA_GEN_CODE}")
# Recursively collect the source files.
file(GLOB_RECURSE cpp_srcs ${PROJECT_SOURCE_DIR}/src/*.cpp)
file(GLOB_RECURSE c_srcs ${PROJECT_SOURCE_DIR}/src/*.c)
file(GLOB_RECURSE cuda_srcs ${PROJECT_SOURCE_DIR}/src/*.cu)
cuda_add_library(cucodes SHARED ${cuda_srcs})
add_executable(pro ${cpp_srcs} ${c_srcs})
# Link the library list exported by FindTensorRT.cmake (nvinfer;nvonnxparser).
target_link_libraries(cucodes ${TensorRT_LIBRARY})
target_link_libraries(cucodes cuda cublas cudart cudnn)
target_link_libraries(pro ${OpenCV_LIBS})
target_link_libraries(pro cucodes)
# `make run` builds pro and executes it from the workspace directory.
add_custom_target(
run
DEPENDS pro
WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}/workspace
COMMAND ./pro
)
在源目录/usr/local/tensorRt8Target/TensorRT-8.5.1.7下,新建cmake文件夹,内置新文件FindTensorRT.cmake:内部定义了需要导出的变量:
# FindTensorRT.cmake -- minimal hand-written find module for TensorRT.
# Exports:
#   TensorRT_INCLUDE_DIR  - TensorRT header directory
#   TensorRT_LIBRARY_DIR  - TensorRT library directory
#   TensorRT_LIBRARY      - libraries to link (nvinfer;nvonnxparser)
#   TensorRT_FOUND        - TRUE only when the directories above exist
cmake_minimum_required(VERSION 3.0)
# CMAKE_CURRENT_LIST_DIR is the absolute path of the directory containing the
# file currently being processed (supported since CMake 2.8.3), so the module
# resolves TensorRT relative to its own location inside the install tree.
set(TensorRT_INCLUDE_DIR ${CMAKE_CURRENT_LIST_DIR}/../include/)
set(TensorRT_LIBRARY_DIR ${CMAKE_CURRENT_LIST_DIR}/../lib)
set(TensorRT_LIBRARY nvinfer;nvonnxparser)
# Fix: the original tested `if(TensorRT_INCLUDE_DIR AND TensorRT_LIBRARY)`,
# which is always true because both variables were just assigned above, so
# TensorRT_FOUND could never be FALSE. Test that the directories actually
# exist so callers (and find_package REQUIRED) get a meaningful result.
if(EXISTS "${TensorRT_INCLUDE_DIR}" AND EXISTS "${TensorRT_LIBRARY_DIR}")
set(TensorRT_FOUND TRUE)
else()
set(TensorRT_FOUND FALSE)
endif()
以Config模式进行改造,自制XXXXConfig.cmake或XXXX-config.cmake
如果Module模式搜索失败,没有找到对应的FindXXX.cmake文件,则自动转入Config模式进行搜索。它主要通过XXXXConfig.cmake 或者XXXX-config.cmake这两个文件来引入我们需要的库。
只需分几步在FindXXX.cmake对应的CMakeLists.txt的基础上进行改造:
1.去掉源码中的
set(CMAKE_MODULE_PATH "/usr/local/tensorRt8Target/TensorRT-8.5.1.7/cmake;${CMAKE_MODULE_PATH}")
2.在find_package(TensorRT)前补充新的XXXXConfig.cmake搜索路径:
set(TensorRT_DIR "/usr/local/tensorRt8Target/TensorRT-8.5.1.7/cmake")
3.将FindTensorRT.cmake重命名为TensorRTConfig.cmake或TensorRT-config.cmake即可。