diff --git a/CMakeLists.txt b/CMakeLists.txt index c6b7224..f77a229 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -43,6 +43,9 @@ target_link_libraries(console_calibration tool_point_calibration ) +add_executable(tool_orientation_calibration src/tool_orientation_calibration.cpp) +add_dependencies(tool_orientation_calibration ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS}) +target_link_libraries(tool_orientation_calibration ${catkin_LIBRARIES} ${CERES_LIBRARIES} tool_point_calibration) ############# ## Install ## diff --git a/README.md b/README.md index 73be57e..18c682d 100644 --- a/README.md +++ b/README.md @@ -7,7 +7,7 @@ The core tcp calibration library only requires the non-linear optimization solve The console calibration utility uses ROS' TF libraries. ## Use -To use: +### tool_point_calibration: 1. Start up your robots driver as normal. A node like `robot_state_publisher` should be publishing TF frames of your robot. 2. Identify a base frame, either the base frame of the robot or the origin of the scene. This will be used for TF lookups, and the `touch point` will be reported in this frame. @@ -25,4 +25,27 @@ To use: 6. Repeat step 5 at least three more times. Each time approach the fixed point in the world from a different orientation. If you wish to abort early, control-C the node. - 7. When you've captured all of the required points, the calibration will be automatically run and the output displayed to the screen. \ No newline at end of file + 7. When you've captured all of the required points, the calibration will be automatically run and the output displayed to the screen. + + +### tool_orientation_calibration: + 1. repeat the first three steps in the case of tool_point_calibration. + + 2. 
Run the orientation node: + ``` + roslaunch tool_point_calibration orientation_calibration.launch base_frame:=YOUR_BASE_FRAME tool0_frame:=YOUR_TOOL0_FRAME num_samples:=NUMBER_OF_TOUCH_POINTS + ``` + By default base frame is `base_frame`, tool0_frame is `tool0`, and num_samples is `4`. I recommend you use at least 4 + samples. + + 3. Repeat steps 5,6,7 of the tool_point_calibration case. + + 4. The last pose that has been used for translational calibration will be automatically saved as the first point for the orientation calibration, two more points are required. + + 5. Jog the robot in such a way that the TCP moves to any location along the X axis of the tool and press enter. + + 6. Jog the robot in such a way that the TCP moves to any location along the Y axis of the tool and press enter. + + + 7. When you've captured all of the required points, the orientation calibration will be automatically run and the output displayed to the screen. + diff --git a/include/tool_point_calibration/tool_point_calibration.h b/include/tool_point_calibration/tool_point_calibration.h index 1b6f481..7452a35 100644 --- a/include/tool_point_calibration/tool_point_calibration.h +++ b/include/tool_point_calibration/tool_point_calibration.h @@ -2,11 +2,12 @@ #define TOOL_POINT_CALIBRATION_H #include +#include namespace tool_point_calibration { -typedef std::vector> Affine3dVector; +typedef std::vector> Isometry3dVector; struct TcpCalibrationResult { @@ -40,7 +41,7 @@ struct TcpCalibrationResult * @param touch_pt_guess * @return */ -TcpCalibrationResult calibrateTcp(const Affine3dVector& tool_poses, +TcpCalibrationResult calibrateTcp(const Isometry3dVector& tool_poses, const Eigen::Vector3d& tcp_guess = Eigen::Vector3d::Zero(), const Eigen::Vector3d& touch_pt_guess = Eigen::Vector3d::Zero()); diff --git a/launch/orientation_calibration.launch b/launch/orientation_calibration.launch new file mode 100644 index 0000000..64705f1 --- /dev/null +++ b/launch/orientation_calibration.launch @@ -0,0
+1,11 @@ + + + + + + + + + + + diff --git a/src/console_calibration.cpp b/src/console_calibration.cpp index 9ee77e1..a2a90ad 100644 --- a/src/console_calibration.cpp +++ b/src/console_calibration.cpp @@ -55,7 +55,7 @@ int main(int argc, char** argv) } // Create storage for user observations - tool_point_calibration::Affine3dVector observations; + tool_point_calibration::Isometry3dVector observations; observations.reserve(num_samples); std::string line; @@ -75,7 +75,7 @@ int main(int argc, char** argv) listener.lookupTransform(base_frame, tool0_frame, ros::Time(0), transform); - Eigen::Affine3d eigen_pose; + Eigen::Isometry3d eigen_pose; tf::poseTFToEigen(transform, eigen_pose); observations.push_back(eigen_pose); diff --git a/src/tool_orientation_calibration.cpp b/src/tool_orientation_calibration.cpp new file mode 100644 index 0000000..88be7cb --- /dev/null +++ b/src/tool_orientation_calibration.cpp @@ -0,0 +1,188 @@ +#include +#include +#include +#include +#include +using namespace Eigen; +using Eigen::MatrixXd; +/** +this node is used to calibrate the orientation of the tool point. +it is an implementation of a paper entitled: +Methods of Calibrating the Orientation of the Industrial Robot Tool[1]. +variable names are the same as the ones used in the paper.
+------------------- +[1]https://www.researchgate.net/publication/330247404_ +Methods_of_Calibrating_the_Orientation_of_the_Industrial_Robot_Tool +**/ +int main(int argc, char** argv) +{ + ros::init(argc, argv, "tool_orientation_calibration"); + ros::NodeHandle pnh ("~"); + // Load user parameters + std::string base_frame, tool0_frame; + int num_samples; + pnh.param("base_frame", base_frame, "base_link"); + pnh.param("tool0_frame", tool0_frame, "tool0"); + pnh.param("num_samples", num_samples, 4); + ROS_INFO("Starting tool calibration with base frame: '%s' and tool0 frame: '%s'.", + base_frame.c_str(), tool0_frame.c_str()); + ROS_INFO("\n ======= step 1: translation calibration ========== "); + ROS_INFO("Move the robot to '%d' different poses, each of which should touch" + " the tool to the same position in space.\n", num_samples); + // Create a transform listener to query tool frames + tf::TransformListener listener; + std::string error_msg; + if (!listener.waitForTransform(base_frame, tool0_frame, ros::Time(0), + ros::Duration(1.0), ros::Duration(0.01), + &error_msg)) + { + ROS_WARN_STREAM("Unable to lookup transform between base frame: '" << base_frame + << "' and tool frame: '" << tool0_frame << "'. TF reported error: " + << error_msg); + bool base_found = listener.frameExists(base_frame); + bool tool_found = listener.frameExists(tool0_frame); + if (!base_found && !tool_found) + { + ROS_WARN("Check to make sure that a robot state publisher or other node is publishing" + " tf frames for your robot.
Also check that your base/tool frames names are" + " correct and not missing a prefix, for example."); + } + else if (!base_found) + { + ROS_WARN("Check to make sure that base frame '%s' actually exists.", base_frame.c_str()); + } + else if (!tool_found) + { + ROS_WARN("Check to make sure that tool0 frame '%s' actually exists.", tool0_frame.c_str()); + } + return 1; + } + // Create storage for user observations + tool_point_calibration::Isometry3dVector observations; + observations.reserve(num_samples); + std::string line; + int count = 0; + Eigen::Isometry3d eigen_pose; + // While ros is ok and there are more counts to be done... + while (ros::ok() && count < num_samples) + { + ROS_INFO("Pose %d: Jog robot to a new location touching the shared position and" + " press enter.", count); + std::getline(std::cin, line); // Blocks program until enter is pressed + tf::StampedTransform transform; + try + { + listener.lookupTransform(base_frame, tool0_frame, + ros::Time(0), transform); + tf::poseTFToEigen(transform, eigen_pose); + observations.push_back(eigen_pose); + ROS_INFO_STREAM("Pose " << count << ": captured transform:\n" << eigen_pose.matrix()); + count++; + } + catch (const tf::TransformException& ex) + { + ROS_ERROR("%s",ex.what()); + ros::Duration(1.0).sleep(); + continue; + } + } + ROS_INFO("Calibration captured %d tool poses (out of %d requested). 
Computing calibration...", + count, num_samples); + tool_point_calibration::TcpCalibrationResult result = + tool_point_calibration::calibrateTcp(observations); + ROS_INFO_STREAM("Calibrated tcp (meters in xyz): [" << result.tcp_offset.transpose() << "] from " << tool0_frame); + ROS_INFO_STREAM("Touch point (meters in xyz): [" << result.touch_point.transpose() << "] in frame " << base_frame); + ROS_INFO_STREAM("Average residual: " << result.average_residual); + ROS_INFO_STREAM("Converged: " << result.converged); + // store the translation vector for the next step: + Vector3d tcp_transl; + tcp_transl = result.tcp_offset.transpose(); + if (count < 4) + { + ROS_WARN("Computing a tool calibration w/ fewer than 4 points may produce an answer with good" + " convergence and residual error, but without a meaningful result. You are encouraged" + " to try with more points"); + } + ROS_INFO("=======\n step 2: TCP Orientation calibration using three point method ========== "); + std::vector A0n; // to store the transformation matrix for the points + A0n.resize( 3); + A0n[0] = eigen_pose; + ROS_INFO("the current pose is considered as the First point ... 
"); + for (int i=1; i<3; i++) + { + if(i ==1) + ROS_INFO("Second point: move the TCP to any location along the X axis of the tool and press enter"); + else + ROS_INFO("Third Point: move TCP to any location along the Y axis of the tool and press enter."); + std::getline(std::cin, line); // Blocks program until enter is pressed + tf::StampedTransform transform; + try + { + listener.lookupTransform(base_frame, tool0_frame, ros::Time(0), transform); + Eigen::Isometry3d eigen_pose; + tf::poseTFToEigen(transform, eigen_pose); + A0n[i] = eigen_pose; + ROS_INFO_STREAM("Pose " << i << ": captured transform:\n" << eigen_pose.matrix()); + } + catch (const tf::TransformException& ex) + { + ROS_ERROR("%s",ex.what()); + ros::Duration(1.0).sleep(); + continue; + } + } + // vector containing the coordinates of the calibrated tool TCP in the Sn ≡ FLANGE coordinate system [1] + Vector4d r0ns_n = {tcp_transl[0], tcp_transl[1], tcp_transl[2], 1}; + // vector of the top of the pointed tip attached to the robot's working space [1] + Vector4d rd_0 = A0n[0]*r0ns_n; + Vector4d r0ns_0 = rd_0; + // point 2: calculate delat1 and then c11, c21, c31 + Vector4d V1;//{ vx, vy, vz, 1}; + V1 = A0n[1].inverse() * A0n[0] * r0ns_n; // r0ns_0 has been modified to r0ns_n + double term1 = ( V1(0) - r0ns_n(0) ); + double term2 = ( V1(1) - r0ns_n(1) ); + double term3 = ( V1(2) - r0ns_n(2) ); + double delta1 = -1* sqrt( term1*term1 + term2*term2 + term3*term3); + double c11 = term1 / delta1; + double c21 = term2 / delta1; + double c31 = term3 / delta1; + ROS_INFO_STREAM(" delat1: "<< delta1); + ROS_INFO_STREAM("c11: "<< c11 << ", c21: "<< c21 << ", c31: "<