Configuring C++ and Python in VS Code, and using CMake

Configuring Python

Python files can be run directly. For debugging, set up a debug configuration to generate the launch.json file, then start debugging.

Configuring C++

{
	"version": "2.0.0",
	"tasks": [
		{
			"type": "cppbuild",
			"label": "C/C++: g++.exe 生成活动文件",
			"command": "E:\\cpp\\mingw64\\bin\\g++.exe",
			"args": [
				"-fdiagnostics-color=always",// 颜色
				"-g",// 调试信息
				"${file}",// 文件本身,仅适用于C++基础知识教学,无法同时编译所有文件
				"-Wall", // 开启所有警告
				"-std=c++14", // c++14标准
				"-o",//输出
				//"${fileDirname}\\${fileBasenameNoExtension}.exe"
				"${workspaceFolder}/src/${fileBasenameNoExtension}" // 文件所在的文件夹路径/src/当前文件的文件名,不带后缀
			],
			"options": {
				"cwd": "${fileDirname}"
			},
			"problemMatcher": [
				"$gcc"
			],
			"group": "build",
			"detail": "编译器: E:\\cpp\\mingw64\\bin\\g++.exe"
		}
	]
}
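
To verify the build task, any single-file program is enough, since the task compiles only the active file (${file}) and writes the executable to ${workspaceFolder}/src/. A minimal sketch (the file name is a placeholder, not from the original configuration):

// hello.cpp - hypothetical test file; open it in the editor and run the build task
#include <iostream>

int main() {
    // With -std=c++14 this prints 201402
    std::cout << "hello, g++ " << __cplusplus << std::endl;
    return 0;
}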

To configure the C++ debug file (launch.json), mainly two places need to be changed:

"program": "${workspaceFolder}/src/${fileBasenameNoExtension}",

"preLaunchTask": "C/C++: g++.exe 生成活动文件",

Delete this block:

 "pipeTransport": {
                "debuggerPath": "E:\\cpp\\mingw64\\bin\\g++.exe",
                "pipeProgram": "${env:windir}\\system32\\bash.exe",
                "pipeArgs": ["-c"],
                "pipeCwd": ""
            },
 "version": "0.2.0",
    "configurations": [
        {
            "name": "g++ 调试",
            "type": "cppdbg",
            "request": "launch",
            "program": "${workspaceFolder}/src/${fileBasenameNoExtension}",
            "args": [],
            "stopAtEntry": false,
            "cwd": "${fileDirname}",
            "environment": [],
            "externalConsole": false,
            "setupCommands": [
                {
                    "description": "为 gdb 启用整齐打印",
                    "text": "-enable-pretty-printing",
                    "ignoreFailures": true
                },
                {
                    "description": "将反汇编风格设置为 Intel",
                    "text": "-gdb-set disassembly-flavor intel",
                    "ignoreFailures": true
                }
            ],
            "preLaunchTask": "C/C++: g++.exe 生成活动文件",
        },
        {
            "name": "Python: debge",
            "type": "python",
            "request": "launch",
            "program": "${file}",
            "console": "integratedTerminal",
            "justMyCode": true
        }
    ]
}

Using CMake

You need to create a CMake build file named CMakeLists.txt.

The three important parts are the minimum version requirement, the project declaration, and the target definitions:

# Minimum required CMake version
cmake_minimum_required(VERSION 3.10)

# Project information
project(test_account)

# Add a shared library
add_library(Account SHARED "./account_dir/Account.cpp" "./account_dir/Account.h")

# Add an executable
add_executable(test_account "./test_account/test_account.cpp")

# Add the include directory
target_include_directories(test_account PUBLIC "./account_dir")
# Link the library
target_link_libraries(test_account Account)
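
The CMakeLists above assumes an Account class split into account_dir/Account.h and account_dir/Account.cpp, plus a test program. Those sources are not shown in the original post, so the following is only a hypothetical sketch of what they might look like:

// account_dir/Account.h (hypothetical)
#pragma once

class Account {
public:
    explicit Account(double balance = 0.0);
    void deposit(double amount);
    double balance() const;

private:
    double balance_;
};

// account_dir/Account.cpp (hypothetical) - compiled into the shared library
#include "Account.h"

Account::Account(double balance) : balance_(balance) {}
void Account::deposit(double amount) { balance_ += amount; }
double Account::balance() const { return balance_; }

// test_account/test_account.cpp (hypothetical) - links against the Account library
#include <iostream>
#include "Account.h"

int main() {
    Account acc(100.0);
    acc.deposit(50.0);
    std::cout << "balance = " << acc.balance() << std::endl;
    return 0;
}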

Installing the output files

When adding both a shared and a static library in the same CMakeLists.txt, you can simply use a common include directory:

include_directories(include)

cmake_minimum_required(VERSION 3.10)

project(instal_demo)

# Add the common include directory
include_directories(include)

# Add a static library
add_library(slib STATIC src/slib.cpp include/slib.h)
## Add the static library's include path
#target_include_directories(slib PUBLIC include)
# Add a shared library
add_library(dlib SHARED src/dlib.cpp include/dlib.h)
## Add the shared library's include path
#target_include_directories(dlib PUBLIC include)

# Set the RPATH; otherwise the shared library cannot be found at runtime after install
SET(CMAKE_BUILD_WITH_INSTALL_RPATH TRUE)
SET(CMAKE_INSTALL_RPATH "${CMAKE_INSTALL_PREFIX}/lib")

# Add the executable
add_executable(instal_demo main.cpp)
# Link the libraries
target_link_libraries(instal_demo slib dlib)

# Declare the public headers so they are installed together with the targets
set_target_properties(slib PROPERTIES PUBLIC_HEADER include/slib.h)
set_target_properties(dlib PROPERTIES PUBLIC_HEADER include/dlib.h)
# Install rules: executables, shared libraries and static libraries are all TARGETS
install(TARGETS instal_demo slib dlib
        RUNTIME DESTINATION bin           # executables
        LIBRARY DESTINATION lib           # shared libraries
        ARCHIVE DESTINATION lib           # static libraries
        PUBLIC_HEADER DESTINATION include # public headers
        )

# Alternatively, skip the PUBLIC_HEADER properties and destination above and
# install the whole header directory instead:
# install(DIRECTORY include/ DESTINATION include)
# install(TARGETS instal_demo slib dlib
#         RUNTIME DESTINATION bin
#         LIBRARY DESTINATION lib
#         ARCHIVE DESTINATION lib
#         )
        
#[[
If -DCMAKE_INSTALL_PREFIX is not set, the project is installed under /usr/local.
To install somewhere else, pass a prefix when configuring:
cmake -S . -B build -DCMAKE_INSTALL_PREFIX=./installed
or another directory:
cmake -S . -B build -DCMAKE_INSTALL_PREFIX=~/Documents/install_demo
cmake --build build
cmake --install build
]]
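
For completeness, a hypothetical main.cpp that uses both libraries; the function names slib_hello() and dlib_hello() are assumptions, since the original post does not show src/slib.cpp, src/dlib.cpp, or main.cpp:

// main.cpp (hypothetical)
#include "slib.h"   // assumed to declare slib_hello()
#include "dlib.h"   // assumed to declare dlib_hello()

int main() {
    slib_hello();   // provided by the static library slib
    dlib_hello();   // provided by the shared library dlib
    return 0;
}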

Finding dependency libraries

# Minimum required CMake version
cmake_minimum_required(VERSION 3.10)

# Project information
project(trt_demo LANGUAGES CXX CUDA)

# Append to CMAKE_MODULE_PATH, otherwise FindTensorRT.cmake cannot be found
list(APPEND CMAKE_MODULE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/cmake)

# Look for the TensorRT package
find_package(TensorRT REQUIRED)

if (TensorRT_FOUND)
    message(STATUS "Found TensorRT ${TensorRT_VERSION} in ${TensorRT_ROOT_DIR}")
    message(STATUS "TensorRT libraries: ${TensorRT_LIBRARIES}")
    message(STATUS "TensorRT include files: ${TensorRT_INCLUDE_DIRS}")
else()
    message(FATAL_ERROR "Cannot find TensorRT")
endif()

# Add an executable
add_executable(build src/build.cpp)

# Include directories
target_include_directories(build PRIVATE ${TensorRT_INCLUDE_DIRS})
# Link libraries
target_link_libraries(build PRIVATE ${TensorRT_LIBRARIES})


# Add an executable
add_executable(runtime src/runtime.cu)

# Include directories
target_include_directories(runtime PRIVATE ${TensorRT_INCLUDE_DIRS})
# Link libraries
target_link_libraries(runtime PRIVATE ${TensorRT_LIBRARIES})
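
As a sanity check that the include directories and libraries found by FindTensorRT.cmake actually link, a minimal sketch of a build.cpp-style program is shown below. It assumes TensorRT 8.x (where createInferBuilder takes an ILogger& and objects can be deleted directly); it is not the original post's src/build.cpp.

// Minimal TensorRT smoke test (assumes TensorRT 8.x)
#include <iostream>
#include <NvInfer.h>

// TensorRT requires an ILogger implementation
class Logger : public nvinfer1::ILogger {
    void log(Severity severity, const char* msg) noexcept override {
        if (severity <= Severity::kWARNING)
            std::cout << msg << std::endl;
    }
};

int main() {
    Logger logger;
    // Creating a builder is enough to prove that headers and libraries are wired up
    nvinfer1::IBuilder* builder = nvinfer1::createInferBuilder(logger);
    if (!builder) {
        std::cerr << "Failed to create TensorRT builder" << std::endl;
        return 1;
    }
    std::cout << "TensorRT builder created successfully" << std::endl;
    delete builder;  // in TensorRT 8+, destroy() is deprecated in favor of delete
    return 0;
}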

CMake build errors

nvcc fatal : Cannot find compiler 'cl.exe' in PATH

Add C:\Program Files\Microsoft Visual Studio\2022\Community\VC\Tools\MSVC\14.35.32215\bin\Hostx64\x64 to the PATH environment variable.

Error: No CUDA toolset found.

This can be solved by manually copying the files from C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\v10.1\extras\visual_studio_integration\MSBuildExtensions to C:\Program Files (x86)\Microsoft Visual Studio\2019\Community\MSBuild\Microsoft\VC\v160\BuildCustomizations (the exact paths differ between CUDA and Visual Studio versions).

If you get the error "Could NOT find TensorRT", add TensorRT to the environment variables:

By not providing "FindTensorRT.cmake" in CMAKE_MODULE_PATH this project has
  asked CMake to find a package configuration file provided by "TensorRT",
  but CMake did not find one.

  Could not find a package configuration file provided by "TensorRT" with any
  of the following names:

    TensorRTConfig.cmake
    tensorrt-config.cmake

  Add the installation prefix of "TensorRT" to CMAKE_PREFIX_PATH or set
  "TensorRT_DIR" to a directory containing one of the above files.  If
  "TensorRT" provides a separate development package or SDK, be sure it has
  been installed.

Add the find-module file, cmake/FindTensorRT.cmake:

## find tensorrt
include(FindPackageHandleStandardArgs)

## TensorRT search path that the user can provide
set(TensorRT_ROOT
	""
	CACHE
	PATH
	"TensorRT root directory")

## TensorRT search paths
set(TensorRT_SEARCH_PATH
  /usr/include/x86_64-linux-gnu
  /usr/src/tensorrt
  /usr/lib/x86_64-linux-gnu
  ${TensorRT_ROOT}
)

## TensorRT libraries to search for
set(TensorRT_ALL_LIBS
  nvinfer
  nvinfer_plugin
  nvparsers
  nvonnxparser
)

## Pre-declare variables that are filled in below
set(TensorRT_LIBS_LIST)
set(TensorRT_LIBRARIES)

## Locate the include directory
find_path(
  TensorRT_INCLUDE_DIR
  NAMES NvInfer.h
  PATHS ${TensorRT_SEARCH_PATH}
)

## Parse the version header in the include directory to determine the TensorRT version
if(TensorRT_INCLUDE_DIR AND EXISTS "${TensorRT_INCLUDE_DIR}/NvInferVersion.h")
  file(STRINGS "${TensorRT_INCLUDE_DIR}/NvInferVersion.h" TensorRT_MAJOR REGEX "^#define NV_TENSORRT_MAJOR [0-9]+.*$")
  file(STRINGS "${TensorRT_INCLUDE_DIR}/NvInferVersion.h" TensorRT_MINOR REGEX "^#define NV_TENSORRT_MINOR [0-9]+.*$")
  file(STRINGS "${TensorRT_INCLUDE_DIR}/NvInferVersion.h" TensorRT_PATCH REGEX "^#define NV_TENSORRT_PATCH [0-9]+.*$")

  string(REGEX REPLACE "^#define NV_TENSORRT_MAJOR ([0-9]+).*$" "\\1" TensorRT_VERSION_MAJOR "${TensorRT_MAJOR}")
  string(REGEX REPLACE "^#define NV_TENSORRT_MINOR ([0-9]+).*$" "\\1" TensorRT_VERSION_MINOR "${TensorRT_MINOR}")
  string(REGEX REPLACE "^#define NV_TENSORRT_PATCH ([0-9]+).*$" "\\1" TensorRT_VERSION_PATCH "${TensorRT_PATCH}")
  set(TensorRT_VERSION_STRING "${TensorRT_VERSION_MAJOR}.${TensorRT_VERSION_MINOR}.${TensorRT_VERSION_PATCH}")
endif()
message("TensorRT version: ${TensorRT_VERSION_STRING}")

## Locate the sample code directory
find_path(
  TensorRT_SAMPLE_DIR
  NAMES trtexec/trtexec.cpp
  PATHS ${TensorRT_SEARCH_PATH}
  PATH_SUFFIXES samples
)

## Search for each TensorRT library in turn
foreach(lib ${TensorRT_ALL_LIBS} )
  find_library(
    TensorRT_${lib}_LIBRARY
    NAMES ${lib}
    PATHS ${TensorRT_SEARCH_PATH}
  )
  ## Collect the library variable names for find_package_handle_standard_args
  set(TensorRT_LIBS_VARS TensorRT_${lib}_LIBRARY ${TensorRT_LIBS_LIST})
  ## Also keep them in a list so they can be iterated with foreach later
  list(APPEND TensorRT_LIBS_LIST TensorRT_${lib}_LIBRARY)
endforeach()

## Use CMake's built-in helper to set standard variables such as TensorRT_FOUND
find_package_handle_standard_args(TensorRT REQUIRED_VARS TensorRT_INCLUDE_DIR TensorRT_SAMPLE_DIR ${TensorRT_LIBS_VARS})

if(TensorRT_FOUND)
  ## Populate the TensorRT_LIBRARIES variable
  foreach(lib ${TensorRT_LIBS_LIST} )
    list(APPEND TensorRT_LIBRARIES ${${lib}})
  endforeach()
  message("Found TensorRT: ${TensorRT_INCLUDE_DIR} ${TensorRT_LIBRARIES} ${TensorRT_SAMPLE_DIR}")
  message("TensorRT version: ${TensorRT_VERSION_STRING}")
endif()

Error:

 Policy CMP0104 is not set: CMAKE_CUDA_ARCHITECTURES now detected for NVCC,
  empty CUDA_ARCHITECTURES not allowed.  Run "cmake --help-policy CMP0104"
  for policy details.  Use the cmake_policy command to set the policy and
  suppress this warning.

Add the following to the CMake file:

# GPU compute capability
if(NOT DEFINED CMAKE_CUDA_ARCHITECTURES)
  set(CMAKE_CUDA_ARCHITECTURES 86)
endif()

Error

Add the environment variables (Linux):

export PATH=/usr/local/cuda-11.1/bin${PATH:+:${PATH}}
export LD_LIBRARY_PATH=/usr/local/cuda-11.1/lib64${LD_LIBRARY_PATH:+:${LD_LIBRARY_PATH}}
export CUDA_HOME="/usr/local/cuda-11.1/"
export CPATH=/usr/local/cuda-11.1/targets/x86_64-linux/include:$CPATH
export LIBRARY_PATH=$LIBRARY_PATH:/usr/local/cuda-11.1/lib64

Error

Add the environment variables (Windows)
