手眼标定的原理与实现

1、手眼标定原理部分

  1. 手眼标定的核心问题在于求解AX=XB这个方程,只是在眼在手上和眼在手外两种情况下,A,B,X的定义略有差异。
  2. 这篇文章的下面这张图很形象的说明了手眼标定中眼在手上的问题。
    (原文此处为示意图,网页提取时图片已丢失)
  3. 这篇文章很好地解释了手眼标定中眼在手上和眼在手外的问题。
  4. 这篇文章是AX=XB这个方程的另外一种求解方式。
  5. 这篇文章是一个眼在手上的C++代码实现例子。

2、手眼标定实现部分(眼在手上)

  1. 数据类型定义
// Parameters describing the calibration chessboard used for hand-eye calibration.
struct CalibrationInitParams
{
	cv::Size CalibrateBoardSize = cv::Size(11, 8);	// inner-corner count: 11 columns x 8 rows
	float CalibrateBoardLength = 30;	// side length of one chessboard square — presumably mm, same unit as the robot poses; TODO confirm
};
	// Robot flange pose at each capture: [i] = {X, Y, Z, alpha, beta, gamma}, ZYZ Euler angles
	std::vector<std::vector<double>> mvvRobotPose;

	// Camera intrinsics and distortion coefficients
	cv::Mat mintrinsicParam;
	cv::Mat mdistortParam;
	// Rotation and translation (hand-eye result)
	cv::Mat mrotationParam;
	cv::Mat mtranslationParam;
	// Per-view rotation/translation vectors from monocular calibration
	std::vector<cv::Mat> mRvecs, mTvecs;
	// Image size (set on first board detection)
	cv::Size mImageSize = cv::Size(0, 0);
	CalibrationInitParams mInitParams;
	
	// Ideal 3D corner grid of the calibration board (one board, Z = 0 plane)
	std::vector<cv::Point3f> mvStandardCalibrateBoard;
	// Ideal 3D board points per image: [image index][point index]
	std::vector<std::vector<cv::Point3f>> mvvStandardCalibrateBoard;
	// Detected 2D corners per image: [image index][point index]
	std::vector<std::vector<cv::Point2f>> mvvCalibrateBoard2DPoints;
  1. 主要实现部分:mvvStandardCalibrateBoard是标定板(例如棋盘格标定板)上的三维点;mvvCalibrateBoard2DPoints是标定板上检测到的二维点;mintrinsicParam是相机的内部参数;mdistortParam是相机的畸变参数;机器人旋转使用的是ZYZ欧拉角,如果是XYZ顺序,需要相应修改旋转矩阵的计算方式(原文此处的参考链接在提取时已丢失)。
// Eye-in-hand hand-eye calibration: solves AX = XB for the camera->gripper
// transform via cv::calibrateHandEye, then validates the result by mapping the
// board into the robot base frame from every capture and measuring the spread.
// Inputs come from member state: mRvecs/mTvecs + detected board points give the
// board pose per image; mvvRobotPose gives the flange pose per capture
// ({X, Y, Z, alpha, beta, gamma}, ZYZ Euler angles).
// NOTE(review): the `error` output parameter is never written in this function —
// confirm whether the per-axis spread below was meant to be returned through it.
void runCalibrateHandEye(double& error)
{
	// Containers for the hand-eye calibration inputs and outputs
	std::vector<Mat> R_gripper2base;
	std::vector<Mat> t_gripper2base;
	std::vector<Mat> R_target2cam;
	std::vector<Mat> t_target2cam;
	Mat R_cam2gripper = (Mat_<double>(3, 3));
	Mat t_cam2gripper = (Mat_<double>(3, 1));

	std::vector<cv::Mat> vecHg, vecHc;
	for (size_t i = 0; i < mRvecs.size(); i++)// board (target) pose in the camera frame, per image
	{
		cv::Mat Rvec, Tvec;
		Rvec = mRvecs[i].clone();
		Tvec = mTvecs[i].clone();
		// Refine the board pose, seeded (useExtrinsicGuess=true) by the monocular-calibration estimate
		cv::solvePnP(mvvStandardCalibrateBoard[i], mvvCalibrateBoard2DPoints[i], mintrinsicParam, mdistortParam, Rvec, Tvec, true, SOLVEPNP_ITERATIVE);
		cv::Rodrigues(Rvec, Rvec);	// rotation vector -> 3x3 rotation matrix

		R_target2cam.push_back(Rvec);
		t_target2cam.push_back(Tvec);

		// Assemble the homogeneous 4x4 target->camera transform.
		// { 3, 0, 1, 3 } is cv::Rect(x=3, y=0, w=1, h=3): the last column; { 0, 0, 3, 3 } is the top-left 3x3 block.
		cv::Mat tmp = cv::Mat::eye(4, 4, CV_64FC1);
		Tvec.copyTo(tmp({ 3, 0, 1, 3 }));
		Rvec.copyTo(tmp({ 0, 0, 3, 3 }));
		vecHc.push_back(tmp);
	}

	// ZYZ Euler rotation. mvvRobotPose stores the robot pose at each capture:
	// XYZ translation followed by alpha, beta, gamma.
	// Type: std::vector<std::vector<double>> mvvRobotPose
	for (size_t i = 0; i < mvvRobotPose.size(); i++)// gripper->base pose per capture
	{
		double angle1 = mvvRobotPose[i][3], angle2 = mvvRobotPose[i][4], angle3 = mvvRobotPose[i][5];
		// Closed-form entries of Rz(angle1) * Ry(angle2) * Rz(angle3)
		double r11 = cos(angle1) * cos(angle2) * cos(angle3) - sin(angle1) * sin(angle3);
		double r12 = -cos(angle1) * cos(angle2) * sin(angle3) - sin(angle1) * cos(angle3);
		double r13 = cos(angle1) * sin(angle2);
		double r21 = sin(angle1) * cos(angle2) * cos(angle3) + cos(angle1) * sin(angle3);
		double r22 = -sin(angle1) * cos(angle2) * sin(angle3) + cos(angle1) * cos(angle3);
		double r23 = sin(angle1) * sin(angle2);
		double r31 = -sin(angle2) * cos(angle3);
		double r32 = sin(angle2) * sin(angle3);
		double r33 = cos(angle2);
		cv::Mat ZYZ_matrix = (cv::Mat_<double>(3, 3) << r11, r12, r13, r21, r22, r23, r31, r32, r33);

		// Sanity-check the composed rotation matrix
		if (!isRotatedMatrix(ZYZ_matrix))		// Euler angles can degenerate (gimbal lock) in special configurations
		{
			cout << "Euler Angle convert to RotatedMatrix failed..." << endl;
			std::getchar();
			exit(-1);
		}
		Mat TMat = (Mat_<double>(3, 1) << mvvRobotPose[i][0], mvvRobotPose[i][1], mvvRobotPose[i][2]);
		R_gripper2base.push_back(ZYZ_matrix);
		t_gripper2base.push_back(TMat);

		// Homogeneous 4x4 gripper->base transform (same Rect layout as above)
		cv::Mat tmp = cv::Mat::eye(4, 4, CV_64FC1);
		TMat.copyTo(tmp({ 3, 0, 1, 3 }));
		ZYZ_matrix.copyTo(tmp({ 0, 0, 3, 3 }));
		vecHg.push_back(tmp);
	}
	// Hand-eye calibration; CALIB_HAND_EYE_TSAI measured ~11 ms here, the fastest of the methods tried
	calibrateHandEye(R_gripper2base, t_gripper2base, R_target2cam, t_target2cam, R_cam2gripper, t_cam2gripper, CALIB_HAND_EYE_TSAI);

	cv::Vec3f angel3 = rotationMatrixToEulerAngles(R_cam2gripper) * 180 / CV_PI;
	std::cout << "绕z轴(前向方向)滚转角:" << angel3[0] << std::endl;
	std::cout << "绕y轴(垂直方向)偏航角:" << angel3[1] << std::endl;
	std::cout << "绕x轴(水平方向)俯仰角:" << angel3[2] << std::endl;
	std::cout << std::endl;

	//vector<Mat> Hgij, Hcij;
	//for (int i = 1; i < vecHg.size(); i++)
	//{
	//	Hgij.push_back(vecHg[i - 1].inv()*vecHg[i]);
	//	Hcij.push_back(vecHc[i - 1] * vecHc[i].inv());
	//}
	cv::Mat Hcg = cv::Mat::eye(4, 4, CV_64FC1);
	//Tsai_HandEye(Hcg, Hgij, Hcij);  // Method 2: compute directly from the Tsai-Lenz formulas instead of calling the OpenCV function

	Hcg = R_T2RT(R_cam2gripper, t_cam2gripper);// merge R and t into one 4x4 homogeneous matrix
	cout << "手眼数据验证:" << endl;
	// base->board transform should be (near-)constant across captures if Hcg is correct
	for (int i = 0; i < vecHc.size(); i++)
	{
		cv::Mat Hcb = vecHg[i] * Hcg * vecHc[i];
		cout << vecHg[i] * Hcg * vecHc[i] << endl << endl;//.inv()
		cv::Point3d data(Hcb.at<double>(0, 3), Hcb.at<double>(1, 3), Hcb.at<double>(2, 3));
		cout << cv::norm(data) << endl << endl;
	}
	
	// NOTE(review): indices [0] and [1] below assume at least two captures were collected
	cout << "标定板在相机中的位姿:" << endl;
	cout << vecHc[1] << endl;
	cout << "手眼系统反演的位姿为:" << endl;
	// Predict the board-to-camera pose of capture 1 from capture 0 via the hand-eye
	// result; it should match vecHc[1] printed above
	cout << Hcg.inv()* vecHg[1].inv()* vecHg[0] * Hcg*vecHc[0] << endl << endl;

	cout << "----手眼系统测试----" << endl;
	cout << "机械臂下标定板XYZ为:" << endl;
	double aveX = 0.0, aveY = 0.0, aveZ = 0.0;
	std::vector<cv::Mat> vworldPos;
	for (int i = 0; i < vecHc.size(); ++i)
	{
		cv::Mat cheesePos{ 0.0,0.0,0.0,1.0 };// 4x1 homogeneous point: the board origin, to be mapped into the robot base frame
		cv::Mat worldPos = vecHg[i] * Hcg*vecHc[i] * cheesePos;
		vworldPos.push_back(worldPos);

		aveX = aveX + worldPos.at<double>(0, 0);
		aveY = aveY + worldPos.at<double>(1, 0);
		aveZ = aveZ + worldPos.at<double>(2, 0);
		cout << i << ": " << worldPos.t() << endl;
	}
	cout << endl;
	aveX = aveX / vecHc.size();
	aveY = aveY / vecHc.size();
	aveZ = aveZ / vecHc.size();

	// Mean absolute deviation from the average position, per axis — a per-axis precision estimate
	double errX = 0.0, errY = 0.0, errZ = 0.0;
	for (int i = 0; i < vworldPos.size(); i++)
	{
		errX = errX + abs(vworldPos[i].at<double>(0, 0) - aveX);
		errY = errY + abs(vworldPos[i].at<double>(1, 0) - aveY);
		errZ = errZ + abs(vworldPos[i].at<double>(2, 0) - aveZ);
	}
	cout << "X方向的精度:" << errX / vworldPos.size() << endl;
	cout << "Y方向的精度:" << errY / vworldPos.size() << endl;
	cout << "Z方向的精度:" << errZ / vworldPos.size() << endl;

	// Persist the calibration result to disk
	{
		cv::FileStorage writer("./pose.yaml", cv::FileStorage::WRITE);
		if (!writer.isOpened())
			return;
		writer << "R" << R_cam2gripper;
		writer << "t" << t_cam2gripper;
		writer.release();
	}
};
// Checks whether the top-left 3x3 block of R is a valid rotation matrix,
// using the property that a rotation matrix is orthonormal: R^T * R == I.
// NOTE(review): the original definition was named `sRotatedMatrix`, but every
// call site in this file (runCalibrateHandEye, rotationMatrixToEulerAngles)
// calls `isRotatedMatrix` — the leading 'i' was evidently lost when the code
// was published. Renamed here to match the callers so the file links.
bool isRotatedMatrix(Mat& R)
{
	Mat temp33 = R({ 0,0,3,3 });	// extract the 3x3 block regardless of the input's size
	Mat Rt;
	transpose(temp33, Rt);  // transpose; for a rotation matrix this is also the inverse
	Mat shouldBeIdentity = Rt * temp33;// equals the identity iff temp33 is orthonormal
	Mat I = Mat::eye(3, 3, shouldBeIdentity.type());

	// cv::norm(a, b) returns the L2 norm of the difference (a - b)
	return cv::norm(I, shouldBeIdentity) < 1e-6;
}
// Decomposes a rotation matrix into Euler angles.
// Returns cv::Vec3f(z, y, x) in radians; the gimbal-lock case pins z to 0.
cv::Vec3f rotationMatrixToEulerAngles(cv::Mat &R)
{
	assert(isRotatedMatrix(R));
	// Norm of the (0,0)/(1,0) pair; this approaches zero at gimbal lock.
	float syNorm = sqrt(R.at<double>(0, 0) * R.at<double>(0, 0) + R.at<double>(1, 0) * R.at<double>(1, 0));

	bool nearGimbalLock = syNorm < 1e-6; // If
	float rotX, rotY, rotZ;
	if (nearGimbalLock)
	{
		// Degenerate configuration: the z angle is unobservable, so fix it at 0.
		rotX = atan2(-R.at<double>(1, 2), R.at<double>(1, 1));
		rotY = atan2(-R.at<double>(2, 0), syNorm);
		rotZ = 0;
	}
	else
	{
		rotX = atan2(R.at<double>(2, 1), R.at<double>(2, 2));
		rotY = atan2(-R.at<double>(2, 0), syNorm);
		rotZ = atan2(R.at<double>(1, 0), R.at<double>(0, 0));
	}
	return cv::Vec3f(rotZ, rotY, rotX);
}
// Tsai-Lenz hand-eye calibration computed directly from the closed-form
// least-squares equations (alternative to calling cv::calibrateHandEye).
// Hgij: relative gripper motions A_i (4x4 homogeneous), Hcij: relative camera
// motions B_i (4x4 homogeneous); the camera->gripper transform X solving
// A X = X B is written into Hcg (4x4).
// NOTE(review): assumes every motion has a non-zero rotation — theta_gij /
// theta_cij of 0 would divide by zero below; confirm input motions rotate.
void Tsai_HandEye(Mat& Hcg, vector<Mat> Hgij, vector<Mat> Hcij)
{
	CV_Assert(Hgij.size() == Hcij.size());
	int nStatus = Hgij.size();

	Mat Rgij(3, 3, CV_64FC1);
	Mat Rcij(3, 3, CV_64FC1);

	Mat rgij(3, 1, CV_64FC1);
	Mat rcij(3, 1, CV_64FC1);

	double theta_gij;
	double theta_cij;

	Mat rngij(3, 1, CV_64FC1);
	Mat rncij(3, 1, CV_64FC1);

	Mat Pgij(3, 1, CV_64FC1);
	Mat Pcij(3, 1, CV_64FC1);

	Mat tempA(3, 3, CV_64FC1);
	Mat tempb(3, 1, CV_64FC1);

	Mat A;
	Mat b;
	Mat pinA;

	Mat Pcg_prime(3, 1, CV_64FC1);
	Mat Pcg(3, 1, CV_64FC1);
	Mat PcgTrs(1, 3, CV_64FC1);

	Mat Rcg(3, 3, CV_64FC1);
	Mat eyeM = Mat::eye(3, 3, CV_64FC1);

	Mat Tgij(3, 1, CV_64FC1);
	Mat Tcij(3, 1, CV_64FC1);

	Mat tempAA(3, 3, CV_64FC1);
	Mat tempbb(3, 1, CV_64FC1);

	Mat AA;
	Mat bb;
	Mat pinAA;

	Mat Tcg(3, 1, CV_64FC1);

	// Build the stacked linear system skew(Pgij + Pcij) * Pcg' = Pcij - Pgij
	for (int i = 0; i < nStatus; i++)
	{
		Hgij[i](Rect(0, 0, 3, 3)).copyTo(Rgij);
		Hcij[i](Rect(0, 0, 3, 3)).copyTo(Rcij);

		// Rotation matrix -> axis-angle (rotation vector)
		Rodrigues(Rgij, rgij);
		Rodrigues(Rcij, rcij);

		theta_gij = norm(rgij);
		theta_cij = norm(rcij);

		// Unit rotation axes
		rngij = rgij / theta_gij;
		rncij = rcij / theta_cij;

		// Modified Rodrigues parameters: P = 2 sin(theta/2) * n
		Pgij = 2 * sin(theta_gij / 2)*rngij;
		Pcij = 2 * sin(theta_cij / 2)*rncij;

		tempA = skew(Pgij + Pcij);
		tempb = Pcij - Pgij;

		A.push_back(tempA);
		b.push_back(tempb);
	}

	//Compute rotation
	// Least-squares solve via SVD pseudo-inverse
	invert(A, pinA, DECOMP_SVD);

	Pcg_prime = pinA * b;
	Pcg = 2 * Pcg_prime / sqrt(1 + norm(Pcg_prime) * norm(Pcg_prime));
	PcgTrs = Pcg.t();
	// Recover the rotation matrix from the modified Rodrigues vector Pcg
	Rcg = (1 - norm(Pcg) * norm(Pcg) / 2) * eyeM + 0.5 * (Pcg * PcgTrs + sqrt(4 - norm(Pcg)*norm(Pcg))*skew(Pcg));

	//Compute Translation 
	// Stacked system (Rgij - I) * Tcg = Rcg * Tcij - Tgij
	for (int i = 0; i < nStatus; i++)
	{
		Hgij[i](Rect(0, 0, 3, 3)).copyTo(Rgij);
		Hcij[i](Rect(0, 0, 3, 3)).copyTo(Rcij);
		Hgij[i](Rect(3, 0, 1, 3)).copyTo(Tgij);
		Hcij[i](Rect(3, 0, 1, 3)).copyTo(Tcij);


		tempAA = Rgij - eyeM;
		tempbb = Rcg * Tcij - Tgij;

		AA.push_back(tempAA);
		bb.push_back(tempbb);
	}

	invert(AA, pinAA, DECOMP_SVD);
	Tcg = pinAA * bb;

	// Assemble the homogeneous result: [Rcg Tcg; 0 0 0 1]
	Rcg.copyTo(Hcg(Rect(0, 0, 3, 3)));
	Tcg.copyTo(Hcg(Rect(3, 0, 1, 3)));
	Hcg.at<double>(3, 0) = 0.0;
	Hcg.at<double>(3, 1) = 0.0;
	Hcg.at<double>(3, 2) = 0.0;
	Hcg.at<double>(3, 3) = 1.0;

}
// Combines a 3x3 rotation R and a 3x1 translation T into one 4x4
// homogeneous transform [R | T; 0 0 0 1] (CV_64F).
Mat R_T2RT(Mat &R, Mat &T)
{
	// Start from the identity: the bottom row 0 0 0 1 is already in place.
	Mat RT = cv::Mat::eye(4, 4, CV_64FC1);
	// Rect(x=0, y=0, w=3, h=3): top-left rotation block.
	R({ 0, 0, 3, 3 }).copyTo(RT({ 0, 0, 3, 3 }));
	// Rect(x=3, y=0, w=1, h=3): translation column.
	T({ 0, 0, 1, 3 }).copyTo(RT({ 3, 0, 1, 3 }));
	return RT;
}
  1. 标定板三维点构建方式:
	// Build the ideal 3D corner grid of the calibration board (Z = 0 plane):
	// rows = mInitParams.CalibrateBoardSize.height inner corners (e.g. 8),
	// cols = mInitParams.CalibrateBoardSize.width inner corners (e.g. 11),
	// square size = mInitParams.CalibrateBoardLength (e.g. 15).
	mvStandardCalibrateBoard.resize(0);
	for (int i = 0; i < mInitParams.CalibrateBoardSize.height; i++)
	{
		for (int j = 0; j < mInitParams.CalibrateBoardSize.width; j++)
		{
			mvStandardCalibrateBoard.push_back(cv::Point3f(float(j * mInitParams.CalibrateBoardLength), float(i * mInitParams.CalibrateBoardLength), 0));
		}
	}
  1. 标定板图像二维点构建方式:
// Detects chessboard inner corners in `im` (optionally restricted to `Roi`),
// refines them to sub-pixel accuracy, and on success appends the 2D corners
// and the matching ideal 3D board points to the member containers.
// FeaturePoints is returned in full-image coordinates even when a ROI is used.
void runCalibrateBoardDetect(const cv::Mat& im, std::vector<cv::Point2f>& FeaturePoints, cv::Rect Roi)
{
	mImageSize = im.size();

	cv::Mat imGray;
	cv::Rect _Roi;
	const bool useRoi = (Roi.width != 0 && Roi.height != 0);
	if (useRoi)
	{
		// BUG FIX: clamp the ROI against the INPUT image dimensions. The
		// original passed imGray.rows/imGray.cols while imGray was still
		// empty (0x0), so the ROI was clamped against a zero-sized image.
		_Roi = ChangeRoi(Roi, im.rows, im.cols);
	}

	// Produce a grayscale working image, cropped to the ROI when one is given.
	if (im.channels() != 1)
	{
		if (useRoi)
			cv::cvtColor(im(_Roi), imGray, cv::COLOR_BGR2GRAY);
		else
			cv::cvtColor(im, imGray, cv::COLOR_BGR2GRAY);
	}
	else
	{
		if (useRoi)
			imGray = im(_Roi).clone();
		else
			imGray = im.clone();
	}

	bool is_found = cv::findChessboardCorners(imGray, mInitParams.CalibrateBoardSize, FeaturePoints);
	if (is_found)
	{
		// Refine corner positions to sub-pixel accuracy (7x7 search window).
		cv::cornerSubPix(imGray, FeaturePoints, cv::Size(7, 7), cv::Size(-1, -1), cv::TermCriteria(cv::TermCriteria::EPS + cv::TermCriteria::COUNT, 30, 0.1));

		if (useRoi)
		{
			// Corners were found in ROI-local coordinates; shift them back
			// into full-image coordinates.
			for (size_t i = 0; i < FeaturePoints.size(); i++)
			{
				FeaturePoints[i].x = FeaturePoints[i].x + _Roi.x;
				FeaturePoints[i].y = FeaturePoints[i].y + _Roi.y;
			}
		}
		mvvCalibrateBoard2DPoints.push_back(FeaturePoints);
		// Every accepted view gets one copy of the ideal board grid so the
		// 2D and 3D point lists stay index-aligned.
		mvvStandardCalibrateBoard.push_back(mvStandardCalibrateBoard);
	}
}
  1. 相机内参计算方式:
void runMonoCalobrate(double& error)
{
	if (mvvCalibrateBoard2DPoints.size() < 3)
	{
		std::cout << (std::string("no enough images for camera ") + std::to_string(0)) << std::endl;
		return;
	}
	mRvecs.clear();
	mTvecs.clear();
	error = cv::calibrateCamera(mvvStandardCalibrateBoard, mvvCalibrateBoard2DPoints, mImageSize, mintrinsicParam, mdistortParam, mRvecs, mTvecs);
}

3、提高精度的几个方向

  1. 两次运动的旋转轴的夹角:越大越好
  2. 每次运动的旋转矩阵对应的旋转角度:越大越好
  3. 相机中心到标定板的距离:距离越小越好
  4. 每次运动机械臂末端运动的距离:距离越小越好
  5. 尽量采集多组用于求解的数据。
  6. 使用高精度的相机标定方法。
  7. 尽量提高机械臂的绝对定位精度,如果该条件达不到,至少需要提高相对运动精度。
  • 22
    点赞
  • 18
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值