🧭 ORB-SLAM + D435i: Extracting the Camera Pose and Publishing It over ROS
I needed to extract the D435i pose from ORB-SLAM, but everything I could find was written for the stereo pipeline rather than RGB-D, and the poses those snippets produced were wrong, so I wrote this up. Credit goes to the GitHub thread below; the fix works very well but has gone almost unnoticed.
Original thread: https://github.com/raulmur/ORB_SLAM2/issues/832
The code is a modified ros_rgbd.cc. Because of the limited performance of my board, I additionally added image downscaling to reduce the per-frame cost.
The same approach works for both ORB-SLAM2 and ORB-SLAM3.
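One caveat about the downscaling: if the frames are resized before being handed to the tracker, the camera intrinsics in the ORB-SLAM settings file have to be multiplied by the same factor (fx, fy, cx, cy, and the image width/height; for RGB-D, Camera.bf scales with fx as well), otherwise the projection model no longer matches the images. Below is only a minimal sketch of the affected YAML entries, assuming a 640x480 color stream and the default scale of 0.4; the numbers are placeholders and the real values must come from your own calibration / camera_info:

%YAML:1.0
# intrinsics of the *resized* images = original intrinsics * 0.4 (placeholder values)
Camera.fx: 244.0     # e.g. 610.0 * 0.4
Camera.fy: 244.0     # e.g. 610.0 * 0.4
Camera.cx: 128.0     # e.g. 320.0 * 0.4
Camera.cy: 96.0      # e.g. 240.0 * 0.4
Camera.width: 256    # 640 * 0.4
Camera.height: 192   # 480 * 0.4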
📄 Final modified code (ros_rgbd.cc)
#include <ros/ros.h>
#include <cv_bridge/cv_bridge.h>
#include <geometry_msgs/PoseStamped.h>
#include <sensor_msgs/Image.h>
#include <message_filters/subscriber.h>
#include <message_filters/sync_policies/approximate_time.h>
#include <message_filters/synchronizer.h>
#include <tf/transform_datatypes.h>
#include <tf/transform_broadcaster.h>
#include <opencv2/core/core.hpp>
#include <iostream>

#include "../../../include/System.h"

using namespace std;
class ImageGrabber
{
public:
    ImageGrabber(ORB_SLAM2::System* pSLAM, ros::NodeHandle& nh)
        : mpSLAM(pSLAM)
    {
        // Pose publisher (geometry_msgs/PoseStamped); change the topic/message type as needed
        pose_pub = nh.advertise<geometry_msgs::PoseStamped>("/orbslam2/vision_pose/pose", 10);
        LoadScaleParams(nh);
    }

    void GrabRGBD(const sensor_msgs::ImageConstPtr& msgRGB, const sensor_msgs::ImageConstPtr& msgD);

private:
    double scale_set;
    ORB_SLAM2::System* mpSLAM;
    ros::Publisher pose_pub;

    void LoadScaleParams(ros::NodeHandle& nh);
};
// Image scaling factor, used to reduce the per-frame cost on weak hardware
void ImageGrabber::LoadScaleParams(ros::NodeHandle& nh)
{
    nh.param("scale", scale_set, 0.4);
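    // "scale" is resolved relative to the node's namespace (typically /scale), so it can be
    // set e.g. with `rosparam set /scale 0.5` or from a launch file; 0.4 is the default here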
}
// Pose conversion helper: TransformFromMat
tf::Transform TransformFromMat(cv::Mat position_mat) {
    cv::Mat rotation = position_mat.rowRange(0,3).colRange(0,3);
    cv::Mat translation = position_mat.rowRange(0,3).col(3);

    tf::Matrix3x3 tf_camera_rotation(
        rotation.at<float>(0,0), rotation.at<float>(0,1), rotation.at<float>(0,2),
        rotation.at<float>(1,0), rotation.at<float>(1,1), rotation.at<float>(1,2),
        rotation.at<float>(2,0), rotation.at<float>(2,1), rotation.at<float>(2,2)
    );
    tf::Vector3 tf_camera_translation(
        translation.at<float>(0),
        translation.at<float>(1),
        translation.at<float>(2)
    );

    // ORB-SLAM coordinate frame to ROS coordinate frame (camera frame)
    const tf::Matrix3x3 tf_orb_to_ros(
         0, 0, 1,
        -1, 0, 0,
         0,-1, 0
    );
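    // This maps the camera optical axes (x right, y down, z forward) to the ROS
    // convention (x forward, y left, z up): x_ros = z_cam, y_ros = -x_cam, z_ros = -y_cam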
    tf_camera_rotation = tf_orb_to_ros * tf_camera_rotation;
    tf_camera_translation = tf_orb_to_ros * tf_camera_translation;

    // Convert the pose from Tcw (world -> camera) to Twc (camera -> world) by taking the inverse
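    // i.e. R_wc = R_cw^T and t_wc = -R_cw^T * t_cw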
    tf_camera_rotation = tf_camera_rotation.transpose();
    tf_camera_translation = -(tf_camera_rotation * tf_camera_translation);
    // Apply the ORB-to-ROS axis change once more (map frame)
    tf_camera_rotation = tf_orb_to_ros * tf_camera_rotation;
    tf_camera_translation = tf_orb_to_ros * tf_camera_translation;

    return tf::Transform(tf_camera_rotation, tf_camera_translation);
}
// Extract the pose and publish it
void ImageGrabber::GrabRGBD(const sensor_msgs::ImageConstPtr& msgRGB, const sensor_msgs::ImageConstPtr& msgD)
{
    cv_bridge::CvImageConstPtr cv_ptrRGB, cv_ptrD;
    try {
        cv_ptrRGB = cv_bridge::toCvShare(msgRGB);
        cv_ptrD = cv_bridge::toCvShare(msgD);
    } catch (cv_bridge::Exception& e) {
        ROS_ERROR("cv_bridge exception: %s", e.what());
        return;
    }

    cv::Mat rgb, depth;
    double scale = scale_set;
    cv::resize(cv_ptrRGB->image, rgb, cv::Size(), scale, scale, cv::INTER_LINEAR);
    cv::resize(cv_ptrD->image, depth, cv::Size(), scale, scale, cv::INTER_NEAREST);
    // The Track call returns the pose (same idea for Stereo / Monocular: just replace the RGBD call)
    cv::Mat Tcw = mpSLAM->TrackRGBD(rgb, depth, msgRGB->header.stamp.toSec());
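    // Tcw comes back empty when no pose is available for this frame
    // (e.g. the system is still initializing or tracking has been lost), so skip those frames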
    if (Tcw.empty()) return;

    // Convert to a tf::Transform in the ROS coordinate convention
    tf::Transform transform = TransformFromMat(Tcw);

    // Publish TF for visualization (optional)
    static tf::TransformBroadcaster tf_broadcaster;
    tf_broadcaster.sendTransform(tf::StampedTransform(transform, msgRGB->header.stamp, "map", "camera_link"));

    // Publish the pose
    tf::Stamped<tf::Pose> stamped_pose(transform, msgRGB->header.stamp, "map");
    geometry_msgs::PoseStamped pose_msg;
    tf::poseStampedTFToMsg(stamped_pose, pose_msg);
    pose_pub.publish(pose_msg);
}
// ---------------- MAIN ------------------
int main(int argc, char **argv)
{
    ros::init(argc, argv, "ORB_SLAM2_RGBD_DebugPose");
    ros::start();

    if(argc != 3)
    {
        cerr << endl << "Usage: rosrun ORB_SLAM2 RGBD_DebugPose path_to_vocabulary path_to_settings" << endl;
        return 1;
    }

    ORB_SLAM2::System SLAM(argv[1], argv[2], ORB_SLAM2::System::RGBD, true);

    ros::NodeHandle nh;
    ImageGrabber igb(&SLAM, nh);

    message_filters::Subscriber<sensor_msgs::Image> rgb_sub(nh, "/camera/rgb/image_raw", 1);
    message_filters::Subscriber<sensor_msgs::Image> depth_sub(nh, "/camera/depth_registered/image_raw", 1);
    typedef message_filters::sync_policies::ApproximateTime<sensor_msgs::Image, sensor_msgs::Image> sync_pol;
    message_filters::Synchronizer<sync_pol> sync(sync_pol(10), rgb_sub, depth_sub);
    sync.registerCallback(boost::bind(&ImageGrabber::GrabRGBD, &igb, _1, _2));

    ros::spin();

    SLAM.Shutdown();
    SLAM.SaveKeyFrameTrajectoryTUM("KeyFrameTrajectory.txt");
    ros::shutdown();
    return 0;
}
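For reference, one way to wire this up with the D435i driver is a launch file. The following is only a sketch under a few assumptions: the executable is named RGBD_DebugPose as in the usage string above (the actual target name depends on your CMakeLists), the color/aligned-depth topics come from realsense2_camera's rs_camera.launch with aligned depth enabled, and the vocabulary/settings paths are placeholders:

<launch>
  <!-- D435i driver with depth aligned to the color stream (assumed realsense2_camera setup) -->
  <include file="$(find realsense2_camera)/launch/rs_camera.launch">
    <arg name="align_depth" value="true"/>
  </include>

  <!-- global parameter picked up by LoadScaleParams() -->
  <param name="scale" value="0.4"/>

  <node pkg="ORB_SLAM2" type="RGBD_DebugPose" name="orbslam2_rgbd" output="screen"
        args="/path/to/ORBvoc.txt /path/to/D435i_settings.yaml">
    <!-- remap the hard-coded topics in the node to the D435i streams -->
    <remap from="/camera/rgb/image_raw" to="/camera/color/image_raw"/>
    <remap from="/camera/depth_registered/image_raw" to="/camera/aligned_depth_to_color/image_raw"/>
  </node>
</launch>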
✅ Verifying the result
Visualize the published pose in RViz (the /orbslam2/vision_pose/pose topic, or the map → camera_link TF).
The demo video was recorded with the monocular mode.