// Single-camera capture program for this camera, written against the vendor's
// official SDK sample code.
#define MAP_DEPTH_TO_COLOR 1
#include "../common/common.hpp"
#include "TYImageProc.h"
#include <opencv2/opencv.hpp>
#include <string>
#include "DepthRender.hpp"
#include <limits>
#include <iostream>
#include <pcl/point_cloud.h>
#include <pcl/io/io.h>
#include <pcl/io/pcd_io.h>
#include <pcl/filters/statistical_outlier_removal.h>
#include <pcl/point_types.h>
#include <pcl/visualization/pcl_visualizer.h>
#include <pcl/visualization/cloud_viewer.h>
#include <pcl/filters/filter.h>
#include <pcl/common/impl/io.hpp>
#include <time.h>
// --- Global SDK state shared by the helpers below and main() ---
TY_INTERFACE_HANDLE hIface; // handle of the opened camera interface (USB/GigE)
TY_INTERFACE_INFO infoInterface; // interface descriptor (never written in this file — TODO confirm / remove)
TY_CAMERA_CALIB_INFO calibDepth; // depth calibration slot (never written in this file — TODO confirm / remove)
int32_t IDComponents; // bitmask of components the device reports as supported (filled in openComponents)
TY_DEV_HANDLE handle; // handle of the opened device
static uint32_t deviceNumber; // number of cameras detected on the opened interface
TY_ISP_HANDLE hColorIspHandle = NULL; // color ISP handle passed to parseFrame(); NOTE(review): never created via TYISPCreate in this file
using namespace std;
using namespace pcl;
std::vector<TY_DEVICE_BASE_INFO> selected; // devices chosen by selectDevice() inside f()
// Camera event callback: logs device-offline and license-error events.
// Registered with TYRegisterEventCallback in main(); userdata is unused.
void eventCallback(TY_EVENT_INFO* event_info, void* userdata)
{
    switch (event_info->eventId) {
    case TY_EVENT_DEVICE_OFFLINE:
        LOGD("=== Event Callback: Device Offline!");
        // Note:
        // Please set TY_BOOL_KEEP_ALIVE_ONOFF feature to false if you need to debug with breakpoint!
        break;
    case TY_EVENT_LICENSE_ERROR:
        LOGD("=== Event Callback: License Error!");
        break;
    default:
        break; // all other events are ignored
    }
}
char* IDDevice;
// Enable every camera component that is both requested in componentIDs and
// reported as supported by the device. Also refreshes the global IDComponents
// bitmask with the device's supported-component set.
void openComponents(TY_DEV_HANDLE handle, int32_t componentIDs)
{
    ASSERT_OK(TYGetComponentIDs(handle, &IDComponents)); // query the components this device supports
    const int32_t candidates[] = {
        TY_COMPONENT_RGB_CAM,      // color camera
        TY_COMPONENT_IR_CAM_LEFT,  // left IR camera
        TY_COMPONENT_IR_CAM_RIGHT, // right IR camera
        TY_COMPONENT_DEPTH_CAM,    // depth camera
    };
    for (const int32_t comp : candidates) {
        // Enable only when the component is supported AND was requested.
        if (IDComponents & comp & componentIDs)
            ASSERT_OK(TYEnableComponents(handle, comp));
    }
}
// Align the color image with the depth image ("RGB registration"):
//   1) undistort the RGB image using the color camera's calibration,
//   2) remap the undistorted RGB into the depth camera's coordinate frame,
// so that out(m, n) is the color of depth pixel (m, n).
// depth is indexed as uint16_t and color/out as 3-byte RGB below, so the
// caller is expected to pass a CV_16U depth and a CV_8UC3 color image.
static void doRgbRegister(const TY_CAMERA_CALIB_INFO& depth_calib, const TY_CAMERA_CALIB_INFO& color_calib, const cv::Mat& depth, const cv::Mat& color, cv::Mat& out)
{
// do rgb undistortion
TY_IMAGE_DATA src;
src.width = color.cols;
src.height = color.rows;
src.size = color.size().area() * 3; // 3 bytes per RGB pixel
src.pixelFormat = TY_PIXEL_FORMAT_RGB;
src.buffer = color.data;
cv::Mat undistort_color = cv::Mat(color.size(), CV_8UC3); // destination for the undistorted image
TY_IMAGE_DATA dst;
dst.width = color.cols;
dst.height = color.rows;
dst.size = undistort_color.size().area() * 3;
dst.buffer = undistort_color.data;
dst.pixelFormat = TY_PIXEL_FORMAT_RGB;
ASSERT_OK(TYUndistortImage(&color_calib, &src, NULL, &dst)); // NULL: keep the original camera intrinsics
// do register
out.create(depth.size(), CV_8UC3); // output has the depth image's resolution
// Map the undistorted RGB image into depth-coordinate RGB.
ASSERT_OK(
TYMapRGBImageToDepthCoordinate(
&depth_calib,
depth.cols, depth.rows, depth.ptr<uint16_t>(),
&color_calib,
undistort_color.cols, undistort_color.rows, undistort_color.ptr<uint8_t>(),
out.ptr<uint8_t>()));
}
// Wrap device selection in a try/catch so a failure (selectDevice throws an
// int when no camera is found) prints a message instead of propagating.
// On success the chosen device(s) end up in the global `selected` vector;
// main() asserts that it is non-empty afterwards.
void f() {
    try {
        std::string ID, IP; // filled by selectDevice; not needed afterwards
        // Removed the unused local `IDComponents` that shadowed the global.
        ASSERT_OK(selectDevice(TY_INTERFACE_ALL, ID, IP, 1, selected)); // enumerate and pick up to 1 camera
    }
    catch (int) { // selectDevice's failure signal; the value itself is unused
        std::cout << "null" << std::endl;
        return;
    }
}
int main() {
std::string ID, IP;
int32_t IDComponents = 0;
ASSERT_OK(TYInitLib()); //初始化
f();
//例外处理程序段
ASSERT(selected.size() > 0);
TY_DEVICE_BASE_INFO& selectedDev = selected[0];
ASSERT_OK(TYOpenInterface(selectedDev.iface.id, &hIface)); //打开指定的接口
ASSERT_OK(TYOpenDevice(hIface, selectedDev.id, &handle));//按设备 ID 打开设备
ASSERT_OK(TYGetDeviceInfo(handle, &selectedDev)); //获取打开设备的基本信息。
{
LOGD("=== selected device %s:", selectedDev.id);
}
openComponents(handle, TY_COMPONENT_RGB_CAM | TY_COMPONENT_IR_CAM_LEFT |
TY_COMPONENT_IR_CAM_RIGHT | TY_COMPONENT_DEPTH_CAM);
//设备几个连接
ASSERT_OK(TYGetDeviceNumber(hIface, &deviceNumber));
std::cout << deviceNumber;
//ASSERT_OK(TYGetComponentIDs(hIface, &IDComponents)); //获取设备支持的组件信息
LOGD("=== Configure feature, set resolution to 640x480.");
//设置深度图像
TYSetEnum(hIface, TY_COMPONENT_DEPTH_CAM, TY_ENUM_IMAGE_MODE, TY_IMAGE_MODE_YUYV_1280x960); //TY_IMAGE_MODE_DEPTH16_1280x960
//设置彩色图像
TYSetEnum(hIface, TY_COMPONENT_RGB_CAM_LEFT, TY_ENUM_IMAGE_MODE, TY_IMAGE_MODE_YUYV_1280x960);//TY_IMAGE_MODE_YUYV_1280x960
uint32_t frameSize;
ASSERT_OK(TYGetFrameBufferSize(handle, &frameSize));
LOGD(" - Get size of framebuffer, %d", frameSize);
LOGD(" - Allocate & enqueue buffers");
//缓存区
char* frameBuffer[3];
frameBuffer[0] = new char[frameSize];
frameBuffer[1] = new char[frameSize];
frameBuffer[2] = new char[frameSize];
LOGD(" - Enqueue buffer (%p, %d)", frameBuffer[0], frameSize);
ASSERT_OK(TYEnqueueBuffer(handle, frameBuffer[0], frameSize));
LOGD(" - Enqueue buffer (%p, %d)", frameBuffer[1], frameSize);
ASSERT_OK(TYEnqueueBuffer(handle, frameBuffer[1], frameSize));
LOGD(" - Enqueue buffer (%p, %d)", frameBuffer[2], frameSize);
ASSERT_OK(TYEnqueueBuffer(handle, frameBuffer[2], frameSize));
LOGD("Register event callback");
ASSERT_OK(TYRegisterEventCallback(handle, eventCallback, NULL));
TY_FRAME_DATA frame;
//相机设置
TYEnableComponents(handle, TY_COMPONENT_RGB_CAM | TY_COMPONENT_DEPTH_CAM | TY_COMPONENT_IR_CAM_LEFT | TY_COMPONENT_IR_CAM_RIGHT);
LOGD("Configure components, open depth cam");
LOGD("Register event callback");
//注册事件回调
ASSERT_OK(TYRegisterEventCallback(handle, eventCallback, NULL));
bool hasTrigger;
ASSERT_OK(TYHasFeature(handle, TY_COMPONENT_DEVICE, TY_STRUCT_TRIGGER_PARAM, &hasTrigger));
if (hasTrigger) {
LOGD("Disable trigger mode");
TY_TRIGGER_PARAM trigger;
trigger.mode = TY_TRIGGER_MODE_OFF;
ASSERT_OK(TYSetStruct(handle, TY_COMPONENT_DEVICE, TY_STRUCT_TRIGGER_PARAM, &trigger, sizeof(trigger)));
}
LOGD("Start capture");
//打开相机
ASSERT_OK(TYStartCapture(handle));
LOGD("While loop to fetch frame");
boost::shared_ptr<pcl::visualization::PCLVisualizer> viewer1(new pcl::visualization::PCLVisualizer("3D Viewer"));
while (!viewer1->wasStopped()) {
int err = TYFetchFrame(handle, &frame, -1);
cv::Mat depth, irl, irr, color;
//获取帧数据
parseFrame(frame, &depth, &irl, &irr, &color, hColorIspHandle);
TY_IMAGE_DATA dpt;
dpt.width = depth.cols;
dpt.height = depth.rows;
dpt.size = depth.size().area() * 3;
dpt.pixelFormat = TY_PIXEL_FORMAT_DEPTH16;
dpt.buffer = depth.data;
//定义滤波函数
DepthSpeckleFilterParameters Spt;
Spt.max_speckle_diff = 150;
Spt.max_speckle_size =64;
//实现滤波
ASSERT_OK(TYDepthSpeckleFilter(&dpt, &Spt));
//TY_IMAGE_DATA克隆出depth
depth = cv::Mat(dpt.height, dpt.width, CV_16U, dpt.buffer).clone();
std::vector<TY_VECT_3F> p3d;
TY_CAMERA_CALIB_INFO depth_calib;
TY_CAMERA_CALIB_INFO color_calib;
pcl::PointCloud<pcl::PointXYZRGB> cloud;
pcl::PointXYZRGB point;
p3d.resize(depth.size().area());
///< the threshold of the noise filter, 0 for disabled
TYGetStruct(handle, TY_COMPONENT_DEPTH_CAM, TY_STRUCT_CAM_CALIB_DATA, &depth_calib, sizeof(depth_calib)); // 提取深度相机的标定数据
TYGetStruct(handle, TY_COMPONENT_RGB_CAM, TY_STRUCT_CAM_CALIB_DATA, &color_calib, sizeof(color_calib)); // 提取RGB相机的标定数据
TYMapDepthImageToPoint3d(&depth_calib, depth.cols, depth.rows, (uint16_t*)depth.data, &p3d[0]); // 深度图像->xyz点云
cv::Mat color_data_mat;
if (!color.empty())
{
bool hasColorCalib;
TYHasFeature(handle, TY_COMPONENT_RGB_CAM, TY_STRUCT_CAM_CALIB_DATA, &hasColorCalib);// 查询有无彩色相机标定参数这一属性
if (hasColorCalib)
{
doRgbRegister(depth_calib, color_calib, depth, color, color_data_mat);// 输入深度相机标定数据、彩色相机标定数据、深度图和彩色图,输出对齐后的彩色图
cv::cvtColor(color_data_mat, color_data_mat, cv::COLOR_BGR2RGB); // BGR 格式转换为 RGB 格式
}
}
for (int m = 0; m < depth.rows; m++)
{
for (int n = 0; n < depth.cols; n++)
{
//将p3d 的 值赋值到新的point
point.x = p3d[(m * (depth.cols) + n)].x;
point.y = p3d[(m * (depth.cols) + n)].y;
point.z = p3d[(m * (depth.cols) + n)].z;
point.r = color_data_mat.at<cv::Vec3b>(m, n)[0];
point.g = color_data_mat.at<cv::Vec3b>(m, n)[1];
point.b = color_data_mat.at<cv::Vec3b>(m, n)[2];
constexpr auto PLAIN_THRESH = 2000;
//为了去除点云复杂背景
//if (point.z > PLAIN_THRESH&& point.z > 800) {
// continue;
//}
//else {
// //添加点云
// cloud.points.push_back(point);
//}
// 构造xyzrgb类型点云
//如果想去除背景就修改2000 和800 的值或者根据自己的需求 自己调整x y z
//下面是获取完整点云的
cloud.points.push_back(point);
}
}
pcl::PointCloud<pcl::PointXYZRGB>::Ptr basic_cloud_ptr(new pcl::PointCloud<pcl::PointXYZRGB>);
basic_cloud_ptr = cloud.makeShared(); // 转换为指针格式 basic_cloud_ptr
basic_cloud_ptr->is_dense = false; // 自己创建的点云,默认为dense,需要修改属性,否则removenanfrompointcloud函数无效
pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud_ptr(new pcl::PointCloud<pcl::PointXYZRGB>);
std::vector<int> mapping;
pcl::removeNaNFromPointCloud(*basic_cloud_ptr, *cloud_ptr, mapping); // 移除无效点
cloud.width = (uint32_t)cloud.points.size();
cloud.height = 1;
viewer1->removeAllPointClouds(); // 移除当前所有点云
//点云去噪
pcl::StatisticalOutlierRemoval<pcl::PointXYZRGB> sor1;
sor1.setInputCloud(cloud_ptr);
sor1.setMeanK(50);
sor1.setStddevMulThresh(3);
sor1.filter(*cloud_ptr);
//点云可视化
viewer1->addPointCloud<pcl::PointXYZRGB>(cloud_ptr, "initial");
viewer1->updatePointCloud(cloud_ptr, "initial");
viewer1->spinOnce(10);
//判断相机拍摄的图片是否为null
if (cloud_ptr->points.size() > 0) {
time_t timer = time(0);
struct tm* t = localtime(&timer);
char timeBuffer[30];
//获取具体时间
strftime(timeBuffer, sizeof(timeBuffer), "%Y%m%d%H%M%S", t);
std::ostringstream ostr;
IDDevice = selectedDev.id;//获取相机id
ostr << ".\\Files\\PCD\\CAM" << IDDevice << "\\" << timeBuffer << "RGB.pcd";//点云保存路径
std::string fpath;
fpath = ostr.str();
pcl::io::savePCDFile<pcl::PointXYZRGB>(fpath, *cloud_ptr, true); //将点云保存到PCD文件中
}
else {
printf("NUll");
}
TYISPUpdateDevice(hColorIspHandle);//用于更新和控制ISP的设备状态
LOGD("Re-enqueue buffer(%p, %d)", frame.userBuffer, frame.bufferSize);
ASSERT_OK(TYEnqueueBuffer(handle, frame.userBuffer, frame.bufferSize)); //将分配的缓冲区排队
}
//关闭 api和其他接口
ASSERT_OK(TYStopCapture(handle));
ASSERT_OK(TYCloseDevice(handle));
ASSERT_OK(TYISPRelease(&hColorIspHandle));
ASSERT_OK(TYDeinitLib());
LOGD("Main done!");
return 0;
}
// Notes on some helpers:
// LOGD behaves like printf and is only used to print camera information.
// f() was an (unsuccessful) attempt to emulate Python's try/except around device selection.