realsense D4xx红外图像的某个像素点转化到彩色图像中的对应像素点

一般是深度图像素和彩色图像素能对齐即匹配,如果是红外摄像头拍的原始左右眼红外图像呢?
其实深度图像素和单眼红外图像像素有个默认的对应关系:一般是跟左眼红外图像相同,也有跟右眼红外图像相同的,realsense是跟左眼的像素相同,即一一对应。
思路:通过以上关系,可以根据左眼红外图中的UV坐标获得其空间坐标即深度图中像素的空间坐标(这里的算法realsense没有公布,虽然是左右眼匹配fb/d公式算深度,但怎么匹配的没公布,这是我目前唯一不了解的内容了),然后转到彩色图空间坐标,最后从彩色图空间坐标转到彩色图UV坐标。完成了红外原始图中指定点在彩色图中的标示。
其实我是要用红外图指定UV点的空间坐标(如果不知道深度图像素与红外图像素的对应关系,是不可能拿到红外像素的深度继而计算空间点坐标的,只能靠自己匹配该点的左右眼图像坐标,然后根据 z = f·b/d(焦距×基线/视差)求其空间点坐标),后续的只是验证和显示用而已。
程序名:realsenseIRtoColor
代码如下:
#include <cstdio>                       // printf / snprintf
#include <cstdlib>                      // EXIT_SUCCESS / EXIT_FAILURE
#include <iostream>
#include <math.h>
#include <opencv2/opencv.hpp>           // OpenCV API
#include <librealsense2/rs.hpp>         // RealSense Cross Platform API
#include <librealsense2/rsutil.h>       // rs2_deproject_pixel_to_point / rs2_project_point_to_pixel
#include "example.hpp"                  // Short list of convenience functions for rendering
#include "cv-helpers.hpp"

using namespace cv;
using namespace std;
using namespace rs2;
// Pixel coordinates of the point clicked in the left-IR window ("红外1");
// written by on_mouse(), read by main() each frame.
int U=0, V=0;
// Becomes true after the first click; gates the IR->color mapping in main().
bool flag = false;
void on_mouse(int EVENT, int x, int y, int flags, void* userdata)
{
Mat hh;
hh = (Mat)userdata;
Point p(x, y);
switch (EVENT)
{
case EVENT_LBUTTONDOWN:
{
// printf(“b=%d\t”, hh.at§[0]);
// printf(“g=%d\t”, hh.at§[1]);
// printf(“r=%d\n”, hh.at§[2]);
printf(“x=%d\t”, x);
printf(“y=%d\n”, y);
U = x; V = y;
flag = true;
// std::cout << “x:” << x << “;” <<y ;
}
break;

}

}
int main(int argc, char * argv[]) try
{

int width = 640, height = 480;
int fps = 30;
char LName[100];//left 
char RName[100];//right 
char DName[100];//depth
char CName[100];//color
long long  i = 0;//counter
rs2::align align_to(RS2_STREAM_COLOR);
rs2::pipeline pipe;//Pipeline
rs2::config pipe_config;
pipe_config.enable_stream(RS2_STREAM_DEPTH, width, height, RS2_FORMAT_Z16, fps);
pipe_config.enable_stream(RS2_STREAM_INFRARED, 1, width, height, RS2_FORMAT_Y8, fps);
pipe_config.enable_stream(RS2_STREAM_INFRARED, 2, width, height, RS2_FORMAT_Y8, fps);
pipe_config.enable_stream(RS2_STREAM_COLOR, width, height, RS2_FORMAT_BGR8, fps);

rs2::pipeline_profile profile = pipe.start(pipe_config);


while (1)
{
	//堵塞程序直到新的一帧捕获
	rs2::frameset frameset = pipe.wait_for_frames();

// frameset = align_to.process(frameset);//经测试,此句一定不能要,深度与彩色对齐后,深度坐标对应点会产生空洞(彩色图不受影响,每个坐标都有深度)导致定位出现明显偏差。
/*
rs2::depth_frame depth = frameset.get_depth_frame(); ///获取深度图像数据
rs2::video_frame color = frameset.get_color_frame(); ///获取彩色图像数据
rs2::video_frame ir_frame_left = frameset.get_infrared_frame(1);
rs2::video_frame ir_frame_right = frameset.get_infrared_frame(2);

	Mat dMat_left(Size(width, height), CV_8UC1, (void*)ir_frame_left.get_data());
	Mat dMat_right(Size(width, height), CV_8UC1, (void*)ir_frame_right.get_data());
	Mat depth_image(Size(width, height), CV_16U, (void*)depth.get_data(), Mat::AUTO_STEP);
	Mat color_image(Size(width, height), CV_8UC3, (void*)color.get_data(), Mat::AUTO_STEP);
	///将彩色图像和深度图像转换为Opencv格式
	cv::Mat image(cv::Size(width, height), CV_8UC3, (void*)color.get_data(), cv::Mat::AUTO_STEP);
	cv::Mat depthmat(cv::Size(width, height), CV_16U, (void*)depth.get_data(), cv::Mat::AUTO_STEP);
	///显示
	cv::imshow("image", image);
	cv::imshow("depth", depthmat);

*/
//取深度图和彩色图
depth_frame depth = frameset.get_depth_frame();
video_frame ir_frame_left = frameset.get_infrared_frame(1);
video_frame ir_frame_right = frameset.get_infrared_frame(2);
frame color_frame = frameset.get_color_frame();

	auto depth_stream = profile.get_stream(RS2_STREAM_DEPTH).as<rs2::video_stream_profile>();
	auto color_stream = profile.get_stream(RS2_STREAM_COLOR).as<rs2::video_stream_profile>();

	//获取内参
	const auto intrinDepth = depth_stream.get_intrinsics();
	const auto intrinColor = color_stream.get_intrinsics();
	//直接获取从深度摄像头坐标系到彩色摄像头坐标系的欧式变换矩阵
	//auto  extrinDepth2Color=depth_stream.get_extrinsics_to(color_stream);
	rs2_extrinsics  extrinDepth2Color;
	rs2_error *error;
	rs2_get_extrinsics(depth_stream, color_stream, &extrinDepth2Color, &error);
	

	Mat dMat_left(Size(width, height), CV_8UC1, (void*)ir_frame_left.get_data());
	Mat dMat_right(Size(width, height), CV_8UC1, (void*)ir_frame_right.get_data());
	Mat depth_image(Size(width, height), CV_16U, (void*)depth.get_data(), Mat::AUTO_STEP);
	Mat color_image(Size(width, height), CV_8UC3, (void*)color_frame.get_data(), Mat::AUTO_STEP);
	imshow("红外1", dMat_left);
	
	setMouseCallback("红外1", on_mouse, &dMat_left);
	if (flag)
	{
		float udist = depth.get_distance(U, V);

// printf(“z=%f\n”, udist);
Point pinfrared,pcolor;
pinfrared.x = U;
pinfrared.y = V;
//画实心点
circle(depth_image, pinfrared, 3, Scalar(255, 0, 0), -1); //第五个参数我设为-1,表明这是个实点。
//平面点定义
float pd_uv[2], pc_uv[2], depth_m;
//空间点定义
float Pdc3[3], Pcc3[3];

		pd_uv[0] = U; pd_uv[1] = V; depth_m = udist;
		//将深度图的像素点根据内参转换到深度摄像头坐标系下的三维点
		rs2_deproject_pixel_to_point(Pdc3, &intrinDepth, pd_uv, depth_m);
		//将深度摄像头坐标系的三维点转化到彩色摄像头坐标系下
		rs2_transform_point_to_point(Pcc3, &extrinDepth2Color, Pdc3);
		//将彩色摄像头坐标系下的深度三维点映射到二维平面上
		rs2_project_point_to_pixel(pc_uv, &intrinColor, Pcc3);
		pcolor.x = pc_uv[0]; pcolor.y = pc_uv[1];
		circle(color_image, pcolor, 3, Scalar(255, 0, 0), -1); //第五个参数我设为-1,表明这是个实点。

// flag = false;

	}


	imshow("红外2", dMat_right);
	imshow("深度图", depth_image);
	imshow("彩图", color_image);
	char c = waitKey(1);
	if (c == 'p')
	{

/*
sprintf_s(LName, “C:\cpppractice\D435iimshow\x64\Debug\left_eye\%d.png”, i);
imwrite(LName, dMat_left);
sprintf_s(RName, “C:\cpppractice\D435iimshow\x64\Debug\right_eye\%d.png”, i);
imwrite(RName, dMat_right);
sprintf_s(DName, “C:\cpppractice\D435iimshow\x64\Debug\depth\%d.png”, i);
imwrite(DName, depth_image);
sprintf_s(CName, “C:\cpppractice\D435iimshow\x64\Debug\color\%d.png”, i);
imwrite(CName, color_image);
*/
sprintf_s(LName, “left_eye%d.png”, i);
imwrite(LName, dMat_left);
sprintf_s(RName, “right_eye%d.png”, i);
imwrite(RName, dMat_right);
sprintf_s(DName, “depth%d.png”, i);
imwrite(DName, depth_image);
sprintf_s(CName, “color%d.png”, i);
imwrite(CName, color_image);
i++;
}
else if (c == ‘q’)
break;

}
i = 0;
return EXIT_SUCCESS;

}
catch (const rs2::error & e)
{
///捕获相机设备的异常
std::cerr << "RealSense error calling " << e.get_failed_function() << “(” << e.get_failed_args() << "):\n " << e.what() << std::endl;
return EXIT_FAILURE;
}
catch (const std::exception& e)
{
std::cerr << "Other error : " << e.what() << std::endl;
return EXIT_FAILURE;
}

评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值