本文主要是介绍手眼标定的原理与实现,希望对大家解决编程问题提供一定的参考价值,需要的开发者们随着小编来一起学习吧!
手眼标定的原理与实现
- 1、手眼标定原理部分
- 2、手眼标定实现部分(眼在手上)
- 3、提高精度的几个方向
1、手眼标定原理部分
- 手眼标定的核心问题在于求解AX=XB这个方程,只是针对于眼在手上和眼在手外时,A,B,X的定义略有差异。
- 这篇文章的下面这张图很形象的说明了手眼标定中眼在手上的问题。
- 这篇文章很好地解释了手眼标定中眼在手上和眼在手外的问题。
- 这篇文章是AX=XB这个方程的另外一种求解方式。
- 这篇文章是一个眼在手上的C++代码实现例子。
2、手眼标定实现部分(眼在手上)
- 数据类型定义
// Initialization parameters for the calibration pipeline.
struct CalibrationInitParams
{
    // Inner-corner grid of the chessboard: width x height = 11 x 8 corners.
    cv::Size CalibrateBoardSize = cv::Size(11, 8);
    // Side length of one chessboard square (units unspecified here,
    // presumably millimeters -- TODO confirm against the physical board).
    float CalibrateBoardLength = 30;
};
// Robot pose at each capture: [i] = image index; each entry is
// {X, Y, Z, alpha, beta, gamma} (translation + ZYZ Euler angles).
std::vector<std::vector<double>> mvvRobotPose;
// Camera intrinsics and distortion coefficients.
cv::Mat mintrinsicParam;
cv::Mat mdistortParam;
// Rotation and translation.
cv::Mat mrotationParam;
cv::Mat mtranslationParam;
// Per-image rotation/translation vectors from monocular calibration.
std::vector<cv::Mat> mRvecs, mTvecs;
// Image size.
cv::Size mImageSize = cv::Size(0, 0);
CalibrationInitParams mInitParams;
// Reference 3-D corner points of one calibration board.
std::vector<cv::Point3f> mvStandardCalibrateBoard;
// Reference board 3-D points: [image index][3-D point index].
std::vector<std::vector<cv::Point3f>> mvvStandardCalibrateBoard;
// Detected board 2-D points: [image index][2-D point index].
std::vector<std::vector<cv::Point2f>> mvvCalibrateBoard2DPoints;
- 主要实现部分:mvvStandardCalibrateBoard是标定板(例如棋盘格标定板)上的三维点;mvvCalibrateBoard2DPoints是标定板上检测到的二维点;mintrinsicParam是相机的内部参数;mdistortParam是相机的畸变参数;机器人旋转使用的是ZYZ,如果是XYZ的话,需要相应修改旋转矩阵的计算公式(原文此处附有参考链接)。
// Solves the eye-in-hand calibration AX = XB and prints accuracy statistics.
// Inputs (members): mRvecs/mTvecs (initial board poses from monocular calibration),
// mvvStandardCalibrateBoard / mvvCalibrateBoard2DPoints (board 3-D/2-D points),
// mintrinsicParam / mdistortParam (camera intrinsics), mvvRobotPose (robot poses).
// Side effects: prints verification data and writes the camera->gripper R and t
// to ./pose.yaml.
// NOTE(review): the `error` out-parameter is never assigned in this function --
// confirm whether callers expect a reprojection error here.
void runCalibrateHandEye(double& error)
{
    // Containers for the hand-eye calibration inputs.
    std::vector<Mat> R_gripper2base;
    std::vector<Mat> t_gripper2base;
    std::vector<Mat> R_target2cam;
    std::vector<Mat> t_target2cam;
    Mat R_cam2gripper = (Mat_<double>(3, 3));
    Mat t_cam2gripper = (Mat_<double>(3, 1));
    // 4x4 homogeneous transforms: vecHg = gripper->base, vecHc = target->camera.
    std::vector<cv::Mat> vecHg, vecHc;
    for (size_t i = 0; i < mRvecs.size(); i++)  // board pose in each image
    {
        cv::Mat Rvec, Tvec;
        Rvec = mRvecs[i].clone();
        Tvec = mTvecs[i].clone();
        // Refine the board pose starting from the monocular-calibration estimate
        // (useExtrinsicGuess = true).
        cv::solvePnP(mvvStandardCalibrateBoard[i], mvvCalibrateBoard2DPoints[i], mintrinsicParam, mdistortParam, Rvec, Tvec, true, SOLVEPNP_ITERATIVE);
        cv::Rodrigues(Rvec, Rvec);  // rotation vector -> 3x3 rotation matrix
        R_target2cam.push_back(Rvec);
        t_target2cam.push_back(Tvec);
        // Assemble the 4x4 homogeneous target->camera transform.
        cv::Mat tmp = cv::Mat::eye(4, 4, CV_64FC1);
        Tvec.copyTo(tmp({ 3, 0, 1, 3 }));  // translation column (Rect x=3,y=0,w=1,h=3)
        Rvec.copyTo(tmp({ 0, 0, 3, 3 }));  // rotation block
        vecHc.push_back(tmp);
    }
    // ZYZ Euler rotation. mvvRobotPose holds the robot pose at each capture:
    // {X, Y, Z, alpha, beta, gamma}; type std::vector<std::vector<double>>.
    for (size_t i = 0; i < mvvRobotPose.size(); i++)  // probe/gripper pose in the robot base
    {
        double angle1 = mvvRobotPose[i][3], angle2 = mvvRobotPose[i][4], angle3 = mvvRobotPose[i][5];
        // Rz(angle1) * Ry(angle2) * Rz(angle3), expanded element by element
        // (r33 = cos(angle2), r13 = cos(angle1)sin(angle2) match the ZYZ form).
        double r11 = cos(angle1) * cos(angle2) * cos(angle3) - sin(angle1) * sin(angle3);
        double r12 = -cos(angle1) * cos(angle2) * sin(angle3) - sin(angle1) * cos(angle3);
        double r13 = cos(angle1) * sin(angle2);
        double r21 = sin(angle1) * cos(angle2) * cos(angle3) + cos(angle1) * sin(angle3);
        double r22 = -sin(angle1) * cos(angle2) * sin(angle3) + cos(angle1) * cos(angle3);
        double r23 = sin(angle1) * sin(angle2);
        double r31 = -sin(angle2) * cos(angle3);
        double r32 = sin(angle2) * sin(angle3);
        double r33 = cos(angle2);
        cv::Mat ZYZ_matrix = (cv::Mat_<double>(3, 3) << r11, r12, r13, r21, r22, r23, r31, r32, r33);  // composed rotation matrix
        if (!isRotatedMatrix(ZYZ_matrix))  // Euler angles can degenerate (gimbal lock) in special cases
        {
            cout << "Euler Angle convert to RotatedMatrix failed..." << endl;
            std::getchar();
            exit(-1);
        }
        Mat TMat = (Mat_<double>(3, 1) << mvvRobotPose[i][0], mvvRobotPose[i][1], mvvRobotPose[i][2]);
        R_gripper2base.push_back(ZYZ_matrix);
        t_gripper2base.push_back(TMat);
        // Assemble the 4x4 homogeneous gripper->base transform.
        cv::Mat tmp = cv::Mat::eye(4, 4, CV_64FC1);
        TMat.copyTo(tmp({ 3, 0, 1, 3 }));
        ZYZ_matrix.copyTo(tmp({ 0, 0, 3, 3 }));
        vecHg.push_back(tmp);
    }
    // Hand-eye calibration; per the original author CALIB_HAND_EYE_TSAI takes
    // ~11 ms, the fastest of the available methods.
    calibrateHandEye(R_gripper2base, t_gripper2base, R_target2cam, t_target2cam, R_cam2gripper, t_cam2gripper, CALIB_HAND_EYE_TSAI);
    cv::Vec3f angel3 = rotationMatrixToEulerAngles(R_cam2gripper) * 180 / CV_PI;  // radians -> degrees
    std::cout << "绕z轴(前向方向)滚转角:" << angel3[0] << std::endl;
    std::cout << "绕y轴(垂直方向)偏航角:" << angel3[1] << std::endl;
    std::cout << "绕x轴(水平方向)俯仰角:" << angel3[2] << std::endl;
    std::cout << std::endl;
    // Alternative (method 2): build relative motions Hgij / Hcij and solve with
    // Tsai's formulas directly instead of cv::calibrateHandEye:
    //vector<Mat> Hgij, Hcij;
    //for (int i = 1; i < vecHg.size(); i++)
    //{
    //	Hgij.push_back(vecHg[i - 1].inv()*vecHg[i]);
    //	Hcij.push_back(vecHc[i - 1] * vecHc[i].inv());
    //}
    cv::Mat Hcg = cv::Mat::eye(4, 4, CV_64FC1);
    //Tsai_HandEye(Hcg, Hgij, Hcij);  // method 2: direct formula, no OpenCV call
    Hcg = R_T2RT(R_cam2gripper, t_cam2gripper);  // merge R and t into one 4x4 matrix
    cout << "手眼数据验证:" << endl;
    for (int i = 0; i < vecHc.size(); i++)
    {
        // Board pose in the robot base frame; should be (nearly) identical
        // across all views if the calibration is good.
        cv::Mat Hcb = vecHg[i] * Hcg * vecHc[i];
        cout << vecHg[i] * Hcg * vecHc[i] << endl << endl;  //.inv()
        cv::Point3d data(Hcb.at<double>(0, 3), Hcb.at<double>(1, 3), Hcb.at<double>(2, 3));
        cout << cv::norm(data) << endl << endl;
    }
    cout << "标定板在相机中的位姿:" << endl;
    cout << vecHc[1] << endl;
    cout << "手眼系统反演的位姿为:" << endl;
    // Predict the board-in-camera pose of view 1 from view 0 via the hand-eye
    // result; it should match vecHc[1].
    cout << Hcg.inv()* vecHg[1].inv()* vecHg[0] * Hcg*vecHc[0] << endl << endl;
    cout << "----手眼系统测试----" << endl;
    cout << "机械臂下标定板XYZ为:" << endl;
    double aveX = 0.0, aveY = 0.0, aveZ = 0.0;
    std::vector<cv::Mat> vworldPos;
    for (int i = 0; i < vecHc.size(); ++i)
    {
        cv::Mat cheesePos{ 0.0,0.0,0.0,1.0 };  // 4x1 homogeneous origin: board XYZ in the robot base frame
        cv::Mat worldPos = vecHg[i] * Hcg*vecHc[i] * cheesePos;
        vworldPos.push_back(worldPos);
        aveX = aveX + worldPos.at<double>(0, 0);
        aveY = aveY + worldPos.at<double>(1, 0);
        aveZ = aveZ + worldPos.at<double>(2, 0);
        cout << i << ": " << worldPos.t() << endl;
    }
    cout << endl;
    aveX = aveX / vecHc.size();
    aveY = aveY / vecHc.size();
    aveZ = aveZ / vecHc.size();
    // Mean absolute deviation per axis as the accuracy estimate.
    double errX = 0.0, errY = 0.0, errZ = 0.0;
    for (int i = 0; i < vworldPos.size(); i++)
    {
        errX = errX + abs(vworldPos[i].at<double>(0, 0) - aveX);
        errY = errY + abs(vworldPos[i].at<double>(1, 0) - aveY);
        errZ = errZ + abs(vworldPos[i].at<double>(2, 0) - aveZ);
    }
    cout << "X方向的精度:" << errX / vworldPos.size() << endl;
    cout << "Y方向的精度:" << errY / vworldPos.size() << endl;
    cout << "Z方向的精度:" << errZ / vworldPos.size() << endl;
    {
        // Persist the calibration result (camera->gripper R and t).
        cv::FileStorage writer("./pose.yaml", cv::FileStorage::WRITE);
        if (!writer.isOpened())
            return;
        writer << "R" << R_cam2gripper;
        writer << "t" << t_cam2gripper;
        writer.release();
    }
};
// Returns true when the top-left 3x3 of R is a valid rotation matrix:
// a rotation matrix's transpose is its inverse, so R^T * R must be the
// identity (within a 1e-6 tolerance).
// FIX: the definition was garbled to "sRotatedMatrix"; every caller in this
// file (runCalibrateHandEye, rotationMatrixToEulerAngles) calls
// isRotatedMatrix, so the name is restored to match.
bool isRotatedMatrix(Mat& R)
{
    Mat temp33 = R({ 0,0,3,3 });  // whatever the input order, check its 3x3 block
    Mat Rt;
    transpose(temp33, Rt);
    Mat shouldBeIdentity = Rt * temp33;  // identity iff temp33 is orthonormal
    Mat I = Mat::eye(3, 3, shouldBeIdentity.type());
    return cv::norm(I, shouldBeIdentity) < 1e-6;
}
// Converts a 3x3 (CV_64F) rotation matrix into XYZ-convention Euler angles,
// returned in radians as Vec3f(z, y, x). Handles the near-singular
// (gimbal-lock) case where sqrt(r00^2 + r10^2) ~ 0 by fixing z = 0.
cv::Vec3f rotationMatrixToEulerAngles(cv::Mat &R)
{
    assert(isRotatedMatrix(R));
    const double r00 = R.at<double>(0, 0);
    const double r10 = R.at<double>(1, 0);
    float sy = sqrt(r00 * r00 + r10 * r10);
    float x, y, z;
    if (sy < 1e-6)  // singular: pitch is at +/-90 degrees, roll/yaw are coupled
    {
        x = atan2(-R.at<double>(1, 2), R.at<double>(1, 1));
        y = atan2(-R.at<double>(2, 0), sy);
        z = 0;
    }
    else            // regular case
    {
        x = atan2(R.at<double>(2, 1), R.at<double>(2, 2));
        y = atan2(-R.at<double>(2, 0), sy);
        z = atan2(r10, r00);
    }
    return cv::Vec3f(z, y, x);
}
// Tsai-Lenz style hand-eye solver: given paired relative motions
// Hgij (gripper) and Hcij (camera), computes the 4x4 camera->gripper
// transform Hcg, solving AX = XB via two linear least-squares steps
// (rotation first, then translation).
void Tsai_HandEye(Mat& Hcg, vector<Mat> Hgij, vector<Mat> Hcij)
{
    CV_Assert(Hgij.size() == Hcij.size());
    int nStatus = Hgij.size();
    // Working matrices: rotations, axis-angle vectors, and the stacked
    // least-squares systems A*Pcg' = b (rotation) and AA*Tcg = bb (translation).
    Mat Rgij(3, 3, CV_64FC1);
    Mat Rcij(3, 3, CV_64FC1);
    Mat rgij(3, 1, CV_64FC1);
    Mat rcij(3, 1, CV_64FC1);
    double theta_gij;
    double theta_cij;
    Mat rngij(3, 1, CV_64FC1);
    Mat rncij(3, 1, CV_64FC1);
    Mat Pgij(3, 1, CV_64FC1);
    Mat Pcij(3, 1, CV_64FC1);
    Mat tempA(3, 3, CV_64FC1);
    Mat tempb(3, 1, CV_64FC1);
    Mat A;
    Mat b;
    Mat pinA;
    Mat Pcg_prime(3, 1, CV_64FC1);
    Mat Pcg(3, 1, CV_64FC1);
    Mat PcgTrs(1, 3, CV_64FC1);
    Mat Rcg(3, 3, CV_64FC1);
    Mat eyeM = Mat::eye(3, 3, CV_64FC1);
    Mat Tgij(3, 1, CV_64FC1);
    Mat Tcij(3, 1, CV_64FC1);
    Mat tempAA(3, 3, CV_64FC1);
    Mat tempbb(3, 1, CV_64FC1);
    Mat AA;
    Mat bb;
    Mat pinAA;
    Mat Tcg(3, 1, CV_64FC1);
    for (int i = 0; i < nStatus; i++)
    {
        // Extract the rotation blocks and convert to axis-angle form.
        Hgij[i](Rect(0, 0, 3, 3)).copyTo(Rgij);
        Hcij[i](Rect(0, 0, 3, 3)).copyTo(Rcij);
        Rodrigues(Rgij, rgij);
        Rodrigues(Rcij, rcij);
        theta_gij = norm(rgij);
        theta_cij = norm(rcij);
        // Unit rotation axes; NOTE(review): divides by zero if a motion pair
        // has no rotation (theta == 0) -- such pairs should be filtered upstream.
        rngij = rgij / theta_gij;
        rncij = rcij / theta_cij;
        // Scaled rotation representation P = 2*sin(theta/2)*n.
        Pgij = 2 * sin(theta_gij / 2)*rngij;
        Pcij = 2 * sin(theta_cij / 2)*rncij;
        // One row-block of the rotation system: skew(Pgij + Pcij) * Pcg' = Pcij - Pgij.
        tempA = skew(Pgij + Pcij);
        tempb = Pcij - Pgij;
        A.push_back(tempA);
        b.push_back(tempb);
    }
    // Compute rotation: least-squares via SVD pseudo-inverse, then recover
    // the rotation matrix Rcg from the solution vector Pcg.
    invert(A, pinA, DECOMP_SVD);
    Pcg_prime = pinA * b;
    Pcg = 2 * Pcg_prime / sqrt(1 + norm(Pcg_prime) * norm(Pcg_prime));
    PcgTrs = Pcg.t();
    Rcg = (1 - norm(Pcg) * norm(Pcg) / 2) * eyeM + 0.5 * (Pcg * PcgTrs + sqrt(4 - norm(Pcg)*norm(Pcg))*skew(Pcg));
    // Compute translation: stack (Rgij - I) * Tcg = Rcg*Tcij - Tgij and solve
    // again by SVD pseudo-inverse.
    for (int i = 0; i < nStatus; i++)
    {
        Hgij[i](Rect(0, 0, 3, 3)).copyTo(Rgij);
        Hcij[i](Rect(0, 0, 3, 3)).copyTo(Rcij);
        Hgij[i](Rect(3, 0, 1, 3)).copyTo(Tgij);
        Hcij[i](Rect(3, 0, 1, 3)).copyTo(Tcij);
        tempAA = Rgij - eyeM;
        tempbb = Rcg * Tcij - Tgij;
        AA.push_back(tempAA);
        bb.push_back(tempbb);
    }
    invert(AA, pinAA, DECOMP_SVD);
    Tcg = pinAA * bb;
    // Assemble the homogeneous result [Rcg Tcg; 0 0 0 1] into Hcg.
    Rcg.copyTo(Hcg(Rect(0, 0, 3, 3)));
    Tcg.copyTo(Hcg(Rect(3, 0, 1, 3)));
    Hcg.at<double>(3, 0) = 0.0;
    Hcg.at<double>(3, 1) = 0.0;
    Hcg.at<double>(3, 2) = 0.0;
    Hcg.at<double>(3, 3) = 1.0;
}
// Merges a 3x3 rotation R and a 3x1 translation T (both CV_64F) into one
// 4x4 homogeneous transform [R T; 0 0 0 1].
Mat R_T2RT(Mat &R, Mat &T)
{
    // Start from the identity so the bottom row is already 0 0 0 1,
    // then paste the rotation block and the translation column in place.
    cv::Mat RT = cv::Mat::eye(4, 4, CV_64FC1);
    R.copyTo(RT(cv::Rect(0, 0, 3, 3)));   // top-left 3x3
    T.copyTo(RT(cv::Rect(3, 0, 1, 3)));   // last column, rows 0..2
    return RT;
}
- 标定板三维点构建方式:
//生成标定板三维点,标定板纵向角点数量mInitParams.CalibrateBoardSize.height,例如(8)//标定板横向角点数量mInitParams.CalibrateBoardSize.width,例如(11)//标定板一个格子的大小CalibrateBoardLength,例如(15)。mvStandardCalibrateBoard.resize(0);for (int i = 0; i < mInitParams.CalibrateBoardSize.height; i++){for (int j = 0; j < mInitParams.CalibrateBoardSize.width; j++){mvStandardCalibrateBoard.push_back(cv::Point3f(float(j * mInitParams.CalibrateBoardLength), float(i * mInitParams.CalibrateBoardLength), 0));}}
- 标定板图像二维点构建方式:
// Detects chessboard corners in `im` (optionally restricted to `Roi`),
// refines them to sub-pixel accuracy and, on success, appends the detected
// 2-D points plus the matching reference 3-D points to the member lists.
// FeaturePoints receives the corners in full-image coordinates.
void runCalibrateBoardDetect(const cv::Mat& im, std::vector<cv::Point2f>& FeaturePoints, cv::Rect Roi)
{
    mImageSize = im.size();
    cv::Mat imGray;
    cv::Rect _Roi;
    const bool useRoi = (Roi.width != 0 && Roi.height != 0);
    if (useRoi)
    {
        // FIX: the original passed imGray.rows/imGray.cols here, but imGray is
        // still empty at this point (0x0), so the ROI was clamped against an
        // empty image. Use the input image dimensions instead.
        _Roi = ChangeRoi(Roi, im.rows, im.cols);
    }
    if (im.channels() != 1)
        cv::cvtColor(useRoi ? im(_Roi) : im, imGray, cv::COLOR_BGR2GRAY);
    else
        imGray = useRoi ? im(_Roi).clone() : im.clone();
    bool is_found = cv::findChessboardCorners(imGray, mInitParams.CalibrateBoardSize, FeaturePoints);
    if (!is_found)
        return;  // board not visible in this image: record nothing
    cv::cornerSubPix(imGray, FeaturePoints, cv::Size(7, 7), cv::Size(-1, -1), cv::TermCriteria(cv::TermCriteria::EPS + cv::TermCriteria::COUNT, 30, 0.1));
    if (useRoi)
    {
        // Corners were found in ROI coordinates; shift back to the full image.
        for (size_t i = 0; i < FeaturePoints.size(); i++)
        {
            FeaturePoints[i].x = FeaturePoints[i].x + _Roi.x;
            FeaturePoints[i].y = FeaturePoints[i].y + _Roi.y;
        }
    }
    mvvCalibrateBoard2DPoints.push_back(FeaturePoints);
    mvvStandardCalibrateBoard.push_back(mvStandardCalibrateBoard);
}
- 相机内参计算方式:
void runMonoCalobrate(double& error)
{if (mvvCalibrateBoard2DPoints.size() < 3){std::cout << (std::string("no enough images for camera ") + std::to_string(0)) << std::endl;return;}mRvecs.clear();mTvecs.clear();error = cv::calibrateCamera(mvvStandardCalibrateBoard, mvvCalibrateBoard2DPoints, mImageSize, mintrinsicParam, mdistortParam, mRvecs, mTvecs);
}
3、提高精度的几个方向
- 两次运动的旋转轴的夹角:越大越好
- 每次运动的旋转矩阵对应的旋转角度:越大越好
- 相机中心到标定板的距离:距离越小越好
- 每次运动机械臂末端运动的距离:距离越小越好
- 尽量采集多组用于求解的数据。
- 使用高精度的相机标定方法。
- 尽量提高机械臂的绝对定位精度,如果该条件达不到,至少需要提高相对运动精度。
这篇关于手眼标定的原理与实现的文章就介绍到这儿,希望我们推荐的文章对编程师们有所帮助!