I am trying to solve this problem more or less as you proposed: * I have a quad with a texture that is my input video (input images are 640 x 480) * The first camera (with depth and color clear masks) looks at the quad and renders to the same image (I'm not sure if this is useful; it feels like an extra unnecessary step to me). The projection matrix is an orthogonal 640 x 480 matrix, the view matrix is an identity matrix. * The second camera (with only depth clear mask) looks at the scene and renders this to the same image. The projection and view matrices were recorded during the recording of the input video.
Both cameras use the COLOR_BUFFER. I use a NodeCallback as an UpdateCallback on my quad. This callback refreshes the quad texture with a new video image and updates the projection and view matrices of the second camera. I also set a final draw callback for each camera, just to see the contents of the image (i.e. I call writeImageFile). For the first camera (its final draw callback is called first), I write to file1.bmp, and for the second camera I write to file2.bmp. My scene consists of a cylinder and a 2D picture that cuts the cylinder in the middle. I see that file1.bmp is what I expected: a video stream image is rendered into the image. file2.bmp is a different story (this one is saved when the second camera has finished drawing). I don't see my video stream (input) image anymore (it has been cleared somehow) and the 3D data is rendered on top of itself (due to the disabled GL_COLOR_BUFFER_BIT?). I have the impression that somehow two color buffers are used at the same time: one for the first camera and one for the second camera. Still, it's clear in the code that I attach both cameras to osg::Camera::COLOR_BUFFER with the same image (m_VideoImage). How is this possible? 
Here is the code: Code: void VideoRecThread::setupTexture() { m_RenderTexture = new osg::Texture2D; m_RenderTexture->setTextureSize(640, 480); m_RenderTexture->setInternalFormat(GL_RGBA); m_RenderTexture->setFilter(osg::Texture2D::MIN_FILTER,osg::Texture2D::LINEAR); m_RenderTexture->setFilter(osg::Texture2D::MAG_FILTER,osg::Texture2D::LINEAR); m_RenderTexture->setDataVariance(osg::Object::DYNAMIC); } void VideoRecThread::setupGeometry() { osg::Geometry* polyGeom = new osg::Geometry(); osg::ref_ptr<osg::Geometry> screenQuad; screenQuad = createTexturedQuadGeometry(osg::Vec3(), osg::Vec3(640, 0.0, 0.0), osg::Vec3(0.0, 480, 0.0), 0.0f, 1.0f, 1.0f, 0.0); m_QuadGeode = new osg::Geode; m_QuadGeode->addDrawable(screenQuad.get()); screenQuad->setName("PolyGeom"); screenQuad->setDataVariance( osg::Object::DYNAMIC ); screenQuad->setSupportsDisplayList(false); osg::StateSet* stateset = new osg::StateSet; stateset->setTextureAttributeAndModes(0, m_RenderTexture,osg::StateAttribute::ON); screenQuad->setStateSet(stateset); } void VideoRecThread::setupImages() { m_VideoImage = new Image(); m_VideoImage->allocateImage(640,480,1, GL_RGBA, GL_UNSIGNED_BYTE); } void VideoRecThread::setupHudCamera() { m_pHudCamera = new osg::Camera; m_pHudCamera->setReferenceFrame(osg::Transform::ABSOLUTE_RF); m_pHudCamera->setProjectionMatrix(osg::Matrix::ortho2D(0, 640, 0, 480)); m_pHudCamera->setViewMatrix(osg::Matrix::identity()); m_pHudCamera->setRenderOrder(osg::Camera::PRE_RENDER); m_pHudCamera->setClearMask(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); m_pHudCamera->setViewport(0,0,640,480); m_pHudCamera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT); m_pHudCamera->attach(osg::Camera::COLOR_BUFFER, m_VideoImage); m_pVideoRecGroup->addChild(m_pHudCamera.get()); m_pHudCamera->addChild(m_QuadGeode); m_TextureUpdateCallback = new TextureCallback(); m_TextureUpdateCallback->updateTexture.connect(boost::bind(&VideoRecThread::updateTexture,this)); 
m_QuadGeode->setUpdateCallback(m_TextureUpdateCallback); m_TextureCallback = new VideoPostDrawCallback(); m_TextureCallback->renderingCompleted.connect(boost::bind(&VideoRecThread:videoRenderingCompleted,this)); m_pHudCamera->setFinalDrawCallback(m_TextureCallback); } void VideoRecThread::setupSnapshotCamera() { m_pSnapshotcamera = new osg::Camera(); m_pSnapshotcamera->setReferenceFrame(osg::Transform::ABSOLUTE_RF); m_pSnapshotcamera->setRenderOrder(osg::Camera::PRE_RENDER); m_pSnapshotcamera->setClearMask(GL_DEPTH_BUFFER_BIT); m_pSnapshotcamera->setViewport(0,0,640,480); m_pSnapshotcamera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT); m_pSnapshotcamera->attach(osg::Camera::COLOR_BUFFER, m_VideoImage); osg::ref_ptr<osg::Node> pScene = getSceneManager()->getSceneData(); m_pSnapshotcamera->addChild(pScene); m_pVideoRecGroup->addChild(m_pSnapshotcamera.get()); m_VideoCallback = new VideoPostDrawCallback(); m_VideoCallback->renderingCompleted.connect(boost::bind(&VideoRecThread::renderingCompleted,this)); m_pSnapshotcamera->setFinalDrawCallback(m_VideoCallback); } void VideoRecThread::postProcess() { m_bInitialized = false; m_pVideoRecGroup = new osg::Group; getSceneManager()->getSceneRoot()->addChild(m_pVideoRecGroup.get()); // adds the video recording group with all its camera's to the scene setupImages(); setupTexture(); setupGeometry(); setupHudCamera(); setupSnapshotCamera(); m_CurrentArFrameIndex = 0; for (unsigned int i = 0; i < m_RecordedFrames; ++i) { m_bInitialized = true; m_SnapshotMutex.lock(); m_SnapshotCondition.wait(&m_SnapshotMutex); m_SnapshotMutex.unlock(); // Perform some post processing code per new rendered frame } m_VideoCallback->renderingCompleted.disconnect_all_slots(); m_TextureCallback->renderingCompleted.disconnect_all_slots(); m_TextureUpdateCallback->updateTexture.disconnect_all_slots(); m_pSnapshotcamera->setFinalDrawCallback(NULL); m_pHudCamera->setFinalDrawCallback(NULL); m_QuadGeode->setUpdateCallback(NULL); 
getSceneManager()->getSceneRoot()->removeChild(m_pVideoRecGroup); } Matrixd VideoRecThread::getProjectionMatrix() const { const Matrixd& res = getMainWindow()->getOsgWidget(0)->getCameraSettings()->getProjectionMatrix(); return res; } Matrixd VideoRecThread::getViewMatrix() const { const Matrixd& res = getMainWindow()->getOsgWidget(0)->getCameraSettings()->getViewMatrix(); return res; } void VideoRecThread::renderingCompleted() { osgDB::writeImageFile(*m_VideoImage, "file2.bmp" ); m_SnapshotMutex.lock(); m_SnapshotCondition.wakeAll(); m_SnapshotMutex.unlock(); if (m_CurrentArFrameIndex == m_RecordedFrames) { getSceneManager()->getSceneRoot()->removeChild(m_pVideoRecGroup); } } void VideoRecThread::videoRenderingCompleted() { osgDB::writeImageFile(*m_VideoImage, "file1.bmp" ); } void VideoRecThread::updateTexture() { if (!m_bInitialized) { return; } if (m_CurrentArFrameIndex < m_RecordedFrames) { m_VideoImage->setImage(m_VideoWidth,m_VideoHeight,1, 3, GL_RGBA, GL_UNSIGNED_BYTE, (unsigned char*) m_RingBuffer[m_CurrentArFrameIndex], Image::NO_DELETE); m_RenderTexture->setImage(m_VideoImage); osg::StateSet* quadState = m_QuadGeode->getOrCreateStateSet(); quadState->setTextureAttributeAndModes(0, m_RenderTexture, osg::StateAttribute::ON); m_pSnapshotcamera->setProjectionMatrix(m_ProjectionMatrices[m_CurrentArFrameIndex]); m_pSnapshotcamera->setViewMatrix(m_ViewMatrices[m_CurrentArFrameIndex]); m_pSnapshotcamera->setViewport(0,0,640,480); ++m_CurrentArFrameIndex; } } Thank you! Benedikt. ------------------ Read this topic online here: http://forum.openscenegraph.org/viewtopic.php?p=33300#33300 Attachments: http://forum.openscenegraph.org//files/file2_213.bmp http://forum.openscenegraph.org//files/file1_136.bmp _______________________________________________ osg-users mailing list osg-users@lists.openscenegraph.org http://lists.openscenegraph.org/listinfo.cgi/osg-users-openscenegraph.org