Converting 3D world coordinates to 2D screen coordinates in OSG is a building block for many applications, such as HUD overlays and object picking. The implementation is described in detail below:
1. Basic Coordinate Conversion Principle
3D-to-2D conversion pipeline: world coordinates → (view matrix) → eye coordinates → (projection matrix) → clip coordinates → (perspective divide) → normalized device coordinates (NDC) → (viewport transform) → screen coordinates.
2D-to-3D conversion pipeline: screen coordinates → (inverse viewport, projection and view transforms) → points on the near and far planes → a world-space ray → (intersection with the scene or a plane) → world coordinates.
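As a quick worked example of the final viewport step (numbers chosen only for illustration): with a 1920 × 1080 viewport whose origin is (0, 0), the NDC point (0.5, -0.5, 0) maps to screen x = (0.5 × 0.5 + 0.5) × 1920 = 1440, screen y = (-0.5 × 0.5 + 0.5) × 1080 = 270, and depth 0 × 0.5 + 0.5 = 0.5.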
2. Core Conversion Methods
2.1 Conversion Using osg::Matrix
// 3D world coordinates to 2D screen coordinates
bool worldToScreen(const osg::Vec3& worldPos, osg::Vec3& screenPos,
                   const osg::Camera* camera)
{
    if (!camera) return false;

    // Fetch the view matrix, projection matrix and viewport
    const osg::Matrix& viewMatrix = camera->getViewMatrix();
    const osg::Matrix& projMatrix = camera->getProjectionMatrix();
    const osg::Viewport* viewport = camera->getViewport();
    if (!viewport) return false;

    // Transform into clip space; use osg::Vec4 so the w component is preserved
    osg::Vec4 eyePos  = osg::Vec4(worldPos, 1.0f) * viewMatrix;
    osg::Vec4 clipPos = eyePos * projMatrix;
    if (clipPos.w() == 0.0f) return false;

    // Perspective divide -> normalized device coordinates (NDC)
    osg::Vec3 ndcPos(clipPos.x() / clipPos.w(),
                     clipPos.y() / clipPos.w(),
                     clipPos.z() / clipPos.w());

    // Viewport transform -> screen coordinates
    screenPos.x() = (ndcPos.x() * 0.5f + 0.5f) * viewport->width()  + viewport->x();
    screenPos.y() = (ndcPos.y() * 0.5f + 0.5f) * viewport->height() + viewport->y();
    screenPos.z() =  ndcPos.z() * 0.5f + 0.5f;

    // Report whether the point lies inside the view frustum
    return (ndcPos.x() >= -1.0f && ndcPos.x() <= 1.0f &&
            ndcPos.y() >= -1.0f && ndcPos.y() <= 1.0f &&
            ndcPos.z() >= -1.0f && ndcPos.z() <= 1.0f);
}
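A minimal usage sketch, assuming an osgViewer::Viewer named viewer that has already been realized so its camera owns a valid viewport:

// Hypothetical usage: project the world origin into window coordinates
osg::Vec3 screenPos;
if (worldToScreen(osg::Vec3(0.0f, 0.0f, 0.0f), screenPos, viewer->getCamera()))
{
    std::cout << "Visible at pixel (" << screenPos.x() << ", " << screenPos.y() << ")" << std::endl;
}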
2.2 Simplified Conversion Using osgUtil::SceneView
// Convenience variant using osgUtil::SceneView. OSG uses the row-vector
// convention (v * M), so the combined matrix is View * Projection * Window,
// and osg::Vec3 * osg::Matrix applies the perspective divide automatically.
osg::Vec3 worldToScreen(const osg::Vec3& worldPos, osgUtil::SceneView* sceneView)
{
    osg::Viewport* viewport = sceneView->getViewport();
    osg::Matrix VPW = sceneView->getViewMatrix() *
                      sceneView->getProjectionMatrix() *
                      viewport->computeWindowMatrix();
    // The result is already in window (screen) coordinates
    return worldPos * VPW;
}
3. 2D-to-3D Coordinate Conversion (Inverse)
// 2D screen coordinates to 3D world coordinates (intersection with a given
// plane, e.g. the ground)
bool screenToWorld(const osg::Vec3& screenPos, osg::Vec3& worldPos,
                   const osg::Camera* camera, const osg::Vec3& planeNormal, float planeDistance)
{
    if (!camera || !camera->getViewport()) return false;

    // Build the inverse of the combined View * Projection * Window matrix
    osg::Matrix VPW = camera->getViewMatrix() *
                      camera->getProjectionMatrix() *
                      camera->getViewport()->computeWindowMatrix();
    osg::Matrix inverseVPW = osg::Matrix::inverse(VPW);

    // Un-project the screen point on the near plane (z = 0) and far plane (z = 1)
    osg::Vec3 nearPoint = osg::Vec3(screenPos.x(), screenPos.y(), 0.0f) * inverseVPW;
    osg::Vec3 farPoint  = osg::Vec3(screenPos.x(), screenPos.y(), 1.0f) * inverseVPW;

    // Intersect the resulting segment with the plane n.p + d = 0
    // (osg::Plane has no segment/point intersection method, so compute it directly)
    osg::Plane plane(planeNormal, planeDistance);
    double dNear = plane.distance(nearPoint);
    double dFar  = plane.distance(farPoint);
    if (dNear == dFar) return false;             // segment parallel to the plane
    double t = dNear / (dNear - dFar);
    if (t < 0.0 || t > 1.0) return false;        // intersection outside the near/far range
    worldPos = nearPoint + (farPoint - nearPoint) * t;
    return true;
}
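A minimal usage sketch, assuming the main camera comes from an osgViewer::Viewer named viewer, the mouse position (mouseX, mouseY) is in window coordinates, and the ground is the plane z = 0 (normal (0, 0, 1), distance 0):

// Hypothetical usage: map a mouse click onto the ground plane
osg::Vec3 groundPoint;
if (screenToWorld(osg::Vec3(mouseX, mouseY, 0.0f), groundPoint,
                  viewer->getCamera(), osg::Vec3(0.0f, 0.0f, 1.0f), 0.0f))
{
    std::cout << "Ground hit at (" << groundPoint.x() << ", " << groundPoint.y() << ")" << std::endl;
}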
4. Practical Application Examples
4.1 Displaying a 2D Label on a 3D Object
class LabelUpdateCallback : public osg::NodeCallback {
public:
    LabelUpdateCallback(osgText::Text* label, osg::Node* target, osg::Camera* camera)
        : _label(label), _target(target), _camera(camera) {}

    virtual void operator()(osg::Node* node, osg::NodeVisitor* nv) {
        // Get the target's world position (guard against an empty matrix list)
        osg::MatrixList worldMats = _target->getWorldMatrices();
        if (!worldMats.empty()) {
            osg::Vec3 worldPos = osg::Vec3(0, 0, 0) * worldMats[0];
            // Convert to screen coordinates
            osg::Vec3 screenPos;
            if (worldToScreen(worldPos, screenPos, _camera)) {
                _label->setPosition(osg::Vec3(screenPos.x(), screenPos.y(), 0.0f));
                _label->setNodeMask(0xFFFFFFFF); // show
            } else {
                _label->setNodeMask(0x0);        // hide (outside the view frustum)
            }
        }
        traverse(node, nv);
    }

private:
    osg::ref_ptr<osgText::Text> _label;
    osg::ref_ptr<osg::Node>     _target;
    osg::ref_ptr<osg::Camera>   _camera;
};
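A minimal wiring sketch, assuming target is the node to label, hudCamera is an orthographic HUD camera such as the one created in 4.2 below, and viewer is the osgViewer::Viewer:

// Hypothetical usage: attach a screen-space label that follows a 3D node
osg::ref_ptr<osgText::Text> label = new osgText::Text;
label->setCharacterSize(20.0f);
label->setText("Target");

osg::ref_ptr<osg::Geode> labelGeode = new osg::Geode;
labelGeode->addDrawable(label);
hudCamera->addChild(labelGeode);

// Run the callback once per frame; attaching it to the HUD camera is sufficient
hudCamera->setUpdateCallback(new LabelUpdateCallback(label, target, viewer->getCamera()));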
4.2 Fixing a Node at a Screen-Space Position
osg::Camera* createHUDCamera(osgViewer::Viewer* viewer)
{
    // Create the HUD camera (the viewer must already be realized so that the
    // main camera's viewport is valid)
    osg::Viewport* vp = viewer->getCamera()->getViewport();
    osg::Camera* camera = new osg::Camera;
    camera->setReferenceFrame(osg::Transform::ABSOLUTE_RF);
    camera->setProjectionMatrix(osg::Matrix::ortho2D(0, vp->width(), 0, vp->height()));
    camera->setViewMatrix(osg::Matrix::identity());
    camera->setClearMask(GL_DEPTH_BUFFER_BIT);
    camera->setRenderOrder(osg::Camera::POST_RENDER);
    camera->setAllowEventFocus(false);
    return camera;
}

void addScreenFixedNode(osg::Group* root, osgViewer::Viewer* viewer, osg::Node* node, const osg::Vec3& screenPos)
{
    osg::Camera* hudCamera = createHUDCamera(viewer);
    // Update callback that keeps the node at the requested screen position
    // (ScreenPositionCallback is a user-defined callback; see the sketch below)
    node->setUpdateCallback(new ScreenPositionCallback(viewer->getCamera(), screenPos));
    hudCamera->addChild(node);
    root->addChild(hudCamera);
}
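ScreenPositionCallback is not an OSG class; a minimal sketch of what it could look like, assuming the node passed to addScreenFixedNode is an osg::MatrixTransform placed under the HUD camera:

class ScreenPositionCallback : public osg::NodeCallback
{
public:
    ScreenPositionCallback(osg::Camera* mainCamera, const osg::Vec3& screenPos)
        : _mainCamera(mainCamera), _screenPos(screenPos) {}

    virtual void operator()(osg::Node* node, osg::NodeVisitor* nv)
    {
        // Under the ortho2D HUD camera, HUD coordinates equal pixel coordinates,
        // so a translation is enough to pin the subgraph to the target position.
        osg::MatrixTransform* mt = dynamic_cast<osg::MatrixTransform*>(node);
        if (mt)
            mt->setMatrix(osg::Matrix::translate(_screenPos));
        // _mainCamera is kept in case the position must be re-derived from the
        // current viewport (e.g. to stay anchored to a corner after a resize).
        traverse(node, nv);
    }

private:
    osg::observer_ptr<osg::Camera> _mainCamera;
    osg::Vec3 _screenPos;
};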
5. Advanced Application: Picking
class PickHandler : public osgGA::GUIEventHandler {
public:
    virtual bool handle(const osgGA::GUIEventAdapter& ea, osgGA::GUIActionAdapter& aa) {
        if (ea.getEventType() != osgGA::GUIEventAdapter::RELEASE ||
            ea.getButton() != osgGA::GUIEventAdapter::LEFT_MOUSE_BUTTON)
            return false;

        osgViewer::View* view = dynamic_cast<osgViewer::View*>(&aa);
        if (!view) return false;

        // Cast a ray from the 2D screen position into the 3D scene
        osgUtil::LineSegmentIntersector::Intersections intersections;
        if (view->computeIntersections(ea.getX(), ea.getY(), intersections)) {
            for (const auto& hit : intersections) {
                osg::Vec3 worldPos = hit.getWorldIntersectPoint();
                osg::Vec3 screenPos;
                // Verify by converting the hit point back to 2D
                worldToScreen(worldPos, screenPos, view->getCamera());
                std::cout << "Picked at: World(" << worldPos << ") Screen("
                          << screenPos.x() << "," << screenPos.y() << ")\n";
            }
        }
        return true;
    }
};
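Registering the handler, a sketch assuming root is the scene root node:

osgViewer::Viewer viewer;
viewer.setSceneData(root);                  // root: your scene graph
viewer.addEventHandler(new PickHandler());  // left-button releases now report hits
viewer.run();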
6. Performance Optimization Tips
Batch conversion: when converting many points, precompute the combined VPW matrix once (see the sketch after this list)
Cache results: for static objects, cache converted positions instead of recomputing them every frame
Frustum culling: test whether a point lies inside the view frustum before converting it
Use shaders: for very large numbers of points, consider doing the transformation on the GPU
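A minimal sketch of the first tip, batch conversion with a precomputed matrix; the helper name transformPoints is illustrative, not an OSG API:

#include <vector>
#include <osg/Camera>

std::vector<osg::Vec3> transformPoints(const std::vector<osg::Vec3>& worldPoints,
                                       const osg::Camera* camera)
{
    std::vector<osg::Vec3> screenPoints;
    if (!camera || !camera->getViewport()) return screenPoints;

    // Computed once per batch instead of once per point
    osg::Matrix VPW = camera->getViewMatrix() *
                      camera->getProjectionMatrix() *
                      camera->getViewport()->computeWindowMatrix();

    screenPoints.reserve(worldPoints.size());
    for (const osg::Vec3& p : worldPoints)
        screenPoints.push_back(p * VPW);   // Vec3 * Matrix applies the perspective divide
    return screenPoints;
}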
7. Troubleshooting Common Problems
Problem 1: converted coordinates are inaccurate
Check that the correct model (world) matrix is applied for transformed nodes
Confirm that the viewport is set correctly
Verify that the projection matrix matches the current camera settings
Problem 2: wrong results when the point is off screen
Add a view-frustum check
Divide by clipPos.w() for the perspective divide instead of dividing by the z value
Problem 3: conversion under an orthographic projection
Under an orthographic projection the math simplifies because no perspective divide is needed (see the sketch below)
Check the near/far plane settings of the orthographic projection matrix
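To illustrate the first point of problem 3, a small sketch with made-up numbers: under an orthographic projection the clip-space w component stays 1, so the perspective divide in worldToScreen() is a no-op and the same code path handles both projection types.

#include <osg/Matrix>
#include <osg/Vec4>
#include <iostream>

void orthoDivideIsNoOp()
{
    // For an orthographic projection w stays 1 (a perspective projection
    // would produce w = -z_eye), so dividing by w leaves x, y, z unchanged.
    osg::Matrix ortho = osg::Matrix::ortho(-10.0, 10.0, -10.0, 10.0, 1.0, 100.0);
    osg::Vec4 clip = osg::Vec4(2.0f, 3.0f, -50.0f, 1.0f) * ortho;
    std::cout << "clip.w() = " << clip.w() << std::endl;   // prints 1
}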