Study notes on "The Longest Frame" (OpenSceneGraph), Part 4

IX. osgUtil::SceneView::draw()

part1 Initialize the GL function pointers of the osg::State class, and decide whether a scene-view init visitor (SceneView::setInitVisitor) is used

  • The State class stores all of the OpenGL state and attribute parameters;
  • in addition, State is responsible for obtaining function addresses from the OpenGL library of the current platform, which is also a mandatory step before the first scene draw; the function used is State::initializeExtensionProcs.
  • If the user has installed a scene-view init visitor (SceneView::setInitVisitor), that visitor traverses the scene graph the first time draw() runs; the relevant code is in SceneView::init(). A small sketch of installing such a visitor follows the draw() code below.
void SceneView::draw()
{
    if (_camera->getNodeMask()==0) return;

    osg::State* state = _renderInfo.getState();

    // we in theory should be able to be able to bypass reset, but we'll call it just in case.
    //_state->reset();
    state->setFrameStamp(_frameStamp.get());

    if (_displaySettings.valid())
    {
        state->setDisplaySettings(_displaySettings.get());
    }

	// obtain function addresses from the OpenGL library of the current platform
    state->initializeExtensionProcs();

    osg::get<ContextData>(state->getContextID())->newFrame(state->getFrameStamp());

// SceneView::init() is where the init visitor (SceneView::setInitVisitor) gets used
    if (!_initCalled) init();

    // note, to support multi-pipe systems the deletion of OpenGL display list
    // and texture objects is deferred until the OpenGL context is the correct
    // context for when the object were originally created.  Here we know what
    // context we are in so can flush the appropriate caches.

// flush everything that has been marked for deletion, releasing the associated OpenGL objects via flushDeletedGLObjects
    if (_requiresFlush)
    {
        double availableTime = 0.005;
        flushDeletedGLObjects(availableTime);
    }

    // assume the draw which is about to happen could generate GL objects that need flushing in the next frame.
    _requiresFlush = _automaticFlush;
    ...
}
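Before moving on, here is a minimal sketch of how such an init visitor might be installed, assuming direct use of osgUtil::SceneView (osgViewer normally manages the SceneView internally); the visitor class name is illustrative:

#include <osg/NodeVisitor>
#include <osg/Notify>
#include <osgUtil/SceneView>

// Hypothetical visitor: SceneView::init() runs it once over the scene
// the first time SceneView::draw() is called.
class InitLogVisitor : public osg::NodeVisitor
{
public:
    InitLogVisitor() : osg::NodeVisitor(TRAVERSE_ALL_CHILDREN) {}
    virtual void apply(osg::Node& node)
    {
        osg::notify(osg::NOTICE) << "init visiting: " << node.getName() << std::endl;
        traverse(node);
    }
};

osg::ref_ptr<osgUtil::SceneView> createSceneView(osg::Node* root)
{
    osg::ref_ptr<osgUtil::SceneView> sceneView = new osgUtil::SceneView;
    sceneView->setDefaults();
    sceneView->setSceneData(root);
    sceneView->setInitVisitor(new InitLogVisitor); // consumed once, inside SceneView::init()
    // a valid graphics context and the usual per-frame cull()/draw() calls are assumed
    return sceneView;
}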

part2 Follow-up operations for non-stereo rendering

void SceneView::draw()
{
...
	 else
    {

        //  when stereo is switched off, the draw buffer needs to be restored.
        /* Set the read/draw buffers of the render stage (RenderStage); possible values include GL_NONE, GL_FRONT_LEFT, GL_FRONT_RIGHT,
        GL_BACK_LEFT, GL_BACK_RIGHT, GL_FRONT, GL_BACK, GL_LEFT, GL_RIGHT, GL_FRONT_AND_BACK and the GL_AUX auxiliary buffers.
        The values are taken from the camera's setDrawBuffer and setReadBuffer settings. */
        if( 0 == ( _camera->getInheritanceMask() & DRAW_BUFFER ) )
        {
            _renderStage->setDrawBuffer(_camera->getDrawBuffer());
            _renderStage->setReadBuffer(_camera->getDrawBuffer());
        }

        if( 0 == ( _camera->getInheritanceMask() & READ_BUFFER ) )
        {
            _renderStage->setReadBuffer(_camera->getReadBuffer());
        }

        _localStateSet->setAttribute(getViewport());

		// make sure every channel of the colour mask is enabled (using osg::ColorMask).
        if (_resetColorMaskToAllEnabled)
        {
            // ensure that all color planes are active.
            osg::ColorMask* cmask = static_cast<osg::ColorMask*>(_localStateSet->getAttribute(osg::StateAttribute::COLORMASK));
            if (cmask)
            {
                cmask->setMask(true,true,true,true);
            }
            else
            {
                cmask = new osg::ColorMask(true,true,true,true);
                _localStateSet->setAttribute(cmask);
            }
            _renderStage->setColorMask(cmask);
        }

		// draw the "pre-render" render stages (RenderStage::drawPreRenderStages)
        // bog standard draw.
        _renderStage->drawPreRenderStages(_renderInfo,previous);
        // draw the current render stage (the root node of the rendering tree) via RenderStage::draw; this is the core of scene drawing
        _renderStage->draw(_renderInfo,previous);
    }

	// After the rendering tree has been drawn, SceneView::draw also restores all OpenGL state (State::popAllStateSets), checks whether any OpenGL errors occurred during drawing, and prints any error messages
    // re apply the default OGL state.
    state->popAllStateSets();
    state->apply();

#if 0
    if (_camera->getPostDrawCallback())
    {
        (*(_camera->getPostDrawCallback()))(*_camera);
    }
#endif

    if (state->getCheckForGLErrors()!=osg::State::NEVER_CHECK_GL_ERRORS)
    {
        if (state->checkGLErrors("end of SceneView::draw()"))
        {
            // go into debug mode of OGL error in a fine grained way to help
            // track down OpenGL errors.
            state->setCheckForGLErrors(osg::State::ONCE_PER_ATTRIBUTE);
        }
    }

// #define REPORT_TEXTURE_MANAGER_STATS
#ifdef REPORT_TEXTURE_MANAGER_STATS
    tom->reportStats();
    bom->reportStats();
#endif

    // OSG_NOTICE<<"SceneView  draw() DynamicObjectCount"<<getState()->getDynamicObjectCount()<<std::endl;

}

part2.1 osgUtil::RenderStage::drawPreRenderStages

void RenderStage::drawPreRenderStages(osg::RenderInfo& renderInfo,RenderLeaf*& previous)
{
    if (_preRenderList.empty()) return;

    //cout << "Drawing prerendering stages "<<this<< "  "<<_viewport->x()<<","<< _viewport->y()<<","<< _viewport->width()<<","<< _viewport->height()<<std::endl;
    for(RenderStageList::iterator itr=_preRenderList.begin();
        itr!=_preRenderList.end();
        ++itr)
    {
        itr->second->draw(renderInfo,previous);
    }
    //cout << "Done Drawing prerendering stages "<<this<< "  "<<_viewport->x()<<","<< _viewport->y()<<","<< _viewport->width()<<","<< _viewport->height()<<std::endl;
}

part2.2 osgUtil::RenderStage::draw: drawing the current render stage (the root node of the rendering tree)

void RenderStage::draw(osg::RenderInfo& renderInfo,RenderLeaf*& previous)
{
    if (_stageDrawnThisFrame) return;

    if(_initialViewMatrix.valid()) renderInfo.getState()->setInitialViewMatrix(_initialViewMatrix.get());

    // push the stages camera so that drawing code can query it
    if (_camera.valid()) renderInfo.pushCamera(_camera.get());

    _stageDrawnThisFrame = true;

    if (_camera.valid() && _camera->getInitialDrawCallback())
    {
        // if we have a camera with a initial draw callback invoke it.
        // run the camera's initial draw callback (Camera::setInitialDrawCallback)
        _camera->getInitialDrawCallback()->run(renderInfo);
    }

    // note, SceneView does call to drawPreRenderStages explicitly
    // so there is no need to call it here.
    drawPreRenderStages(renderInfo,previous);

    if (_cameraRequiresSetUp || (_camera.valid() && _cameraAttachmentMapModifiedCount!=_camera->getAttachmentMapModifiedCount()))
    {
    	// run the camera set-up (RenderStage::runCameraSetUp), where RTT is configured
        runCameraSetUp(renderInfo);
    }

	// To keep the graphics threads from conflicting with each other, the graphics context (GraphicsContext) being used is checked here.
	// If the context that is currently running differs from the one recorded by this render stage (RenderStage::_graphicsContext),
	// switch over to the stage's own context, so that making it current (GraphicsContext::makeCurrent) does not go wrong
    osg::State& state = *renderInfo.getState();

    osg::State* useState = &state;
    osg::GraphicsContext* callingContext = state.getGraphicsContext();
    osg::GraphicsContext* useContext = callingContext;
    osg::OperationThread* useThread = 0;
    osg::RenderInfo useRenderInfo(renderInfo);

    RenderLeaf* saved_previous = previous;
	
	// _graphicsContext is the context recorded by this render stage, while renderInfo carries the calling context;
	// we need to switch over to the context recorded by the render stage
    if (_graphicsContext.valid() && _graphicsContext != callingContext)
    {
        // show we release the context so that others can use it?? will do so right
        // now as an experiment.
        callingContext->releaseContext();

        // OSG_NOTICE<<"  enclosing state before - "<<state.getStateSetStackSize()<<std::endl;

        useState = _graphicsContext->getState();
        useContext = _graphicsContext.get();
        useThread = useContext->getGraphicsThread();
        useRenderInfo.setState(useState);

        // synchronize the frame stamps
        useState->setFrameStamp(const_cast<osg::FrameStamp*>(state.getFrameStamp()));

        // map the DynamicObjectCount across to the new window
        useState->setDynamicObjectCount(state.getDynamicObjectCount());
        useState->setDynamicObjectRenderingCompletedCallback(state.getDynamicObjectRenderingCompletedCallback());

        if (!useThread)
        {
            previous = 0;
            useContext->makeCurrent();

            // OSG_NOTICE<<"  nested state before - "<<useState->getStateSetStackSize()<<std::endl;
        }
    }

    unsigned int originalStackSize = useState->getStateSetStackSize();

	// run the camera's pre-draw callback (Camera::setPreDrawCallback)
    if (_camera.valid() && _camera->getPreDrawCallback())
    {
        // if we have a camera with a pre draw callback invoke it.
        _camera->getPreDrawCallback()->run(renderInfo);
    }

    bool doCopyTexture = _texture.valid() ?
                        (callingContext != useContext) :
                        false;

    if (useThread)
    {
#if 1
        ref_ptr<osg::BlockAndFlushOperation> block = new osg::BlockAndFlushOperation;

        useThread->add(new DrawInnerOperation( this, renderInfo ));// operation dedicated to the drawing work

        useThread->add(block.get());// force the thread's other Operation objects to wait until drawing has finished

        // wait till the DrawInnerOperations is complete.
        block->block();

        doCopyTexture = false;

#else
        useThread->add(new DrawInnerOperation( this, renderInfo ), true);

        doCopyTexture = false;
#endif
    }
    else
    {
    	// single-threaded model
        drawInner( useRenderInfo, previous, doCopyTexture);

        if (useRenderInfo.getUserData() != renderInfo.getUserData())
        {
        	// propagate the user data from the RenderInfo actually used back into the caller's renderInfo
            renderInfo.setUserData(useRenderInfo.getUserData());
        }

    }

    if (useState != &state)
    {
        // reset the local State's DynamicObjectCount
        state.setDynamicObjectCount(useState->getDynamicObjectCount());
        useState->setDynamicObjectRenderingCompletedCallback(0);
    }


    // now copy the rendered image to attached texture.
    if (_texture.valid() && !doCopyTexture)
    {
        if (callingContext && useContext!= callingContext)
        {
            // make the calling context use the pbuffer context for reading.
            callingContext->makeContextCurrent(useContext);
        }
		// for the FRAME_BUFFER render target, copy the rendered scene into the texture object specified by the user
        copyTexture(renderInfo);
    }

	// run the camera's post-draw callback (Camera::setPostDrawCallback)
    if (_camera.valid() && _camera->getPostDrawCallback())
    {
        // if we have a camera with a post draw callback invoke it.
        _camera->getPostDrawCallback()->run(renderInfo);
    }

	// If single-threaded, flush all pending commands in the OpenGL pipeline with glFlush and release the current rendering
	// context (GraphicsContext::releaseContext); what gets released is the stage's own context we switched to above
    if (_graphicsContext.valid() && _graphicsContext != callingContext)
    {
        useState->popStateSetStackToSize(originalStackSize);

        if (!useThread)
        {


            // flush any command left in the useContex's FIFO
            // to ensure that textures are updated before main thread commenses.
            glFlush();


            useContext->releaseContext();
        }
    }

    if (callingContext && useContext != callingContext)
    {
        // restore the graphics context.

        previous = saved_previous;

        // OSG_NOTICE<<"  nested state after - "<<useState->getStateSetStackSize()<<std::endl;
        // OSG_NOTICE<<"  enclosing state after - "<<state.getStateSetStackSize()<<std::endl;

        callingContext->makeCurrent();
    }

	// draw the "post-render" render stages (RenderStage::drawPostRenderStages)
    // render all the post draw callbacks
    drawPostRenderStages(renderInfo,previous);

	// run the camera's final draw callback (Camera::setFinalDrawCallback)
    if (_camera.valid() && _camera->getFinalDrawCallback())
    {
        // if we have a camera with a final callback invoke it.
        _camera->getFinalDrawCallback()->run(renderInfo);
    }

    // pop the render stages camera.
    if (_camera.valid()) renderInfo.popCamera();

    // clean up state graph to make sure RenderLeaf etc, can be reused
    if (_rootStateGraph.valid()) _rootStateGraph->clean();
}

As can be seen, camera callbacks are invoked at several distinct points during scene drawing (the initial, pre-draw, post-draw and final draw callbacks; pay particular attention to how each callback's timing relates to the current rendering context). Depending on our actual needs, we can execute custom code, or OpenGL calls (not in the initial and final callbacks), inside any of them.
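As a hedged illustration of these hook points (the callback class below is hypothetical; the osg::Camera setters are the real API), the callbacks can be installed like this:

#include <osg/Camera>
#include <osg/Notify>
#include <osg/RenderInfo>
#include <string>

// Hypothetical callback that just reports when it runs; the pre- and post-draw
// callbacks are invoked with the render stage's graphics context current.
struct TimingCallback : public osg::Camera::DrawCallback
{
    TimingCallback(const std::string& tag) : _tag(tag) {}
    virtual void operator()(osg::RenderInfo& renderInfo) const
    {
        const osg::FrameStamp* fs = renderInfo.getState()->getFrameStamp();
        osg::notify(osg::NOTICE) << _tag << " callback, frame "
                                 << (fs ? fs->getFrameNumber() : 0) << std::endl;
    }
    std::string _tag;
};

void installCameraCallbacks(osg::Camera* camera)
{
    camera->setInitialDrawCallback(new TimingCallback("initial"));
    camera->setPreDrawCallback(new TimingCallback("pre-draw"));
    camera->setPostDrawCallback(new TimingCallback("post-draw"));
    camera->setFinalDrawCallback(new TimingCallback("final"));
}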

osgUtil::RenderStage::runCameraSetUp
  • It implements a feature that can be very important in scene rendering: render to texture (RTT), also known as texture baking. RTT means that an image rendered in the background in real time can be used directly as a texture on objects of another scene, allowing much richer scene presentation.
  • The basic steps of RTT are: (1) create a "render texture", e.g. an FBO (frame buffer object) or a pixel buffer; (2) set it as the render target of the graphics context; (3) bind the render texture to a texture or image object; (4) rendering then takes place in the background and the result appears directly on the bound texture object.
  • The render target is set through Camera::setRenderTargetImplementation. Among the possible values, FRAME_BUFFER means the frame buffer, which works on a wide range of hardware (e.g. the screen), while FRAME_BUFFER_OBJECT means an FBO, which can be used for offscreen rendering whose result does not appear in the graphics window.
  • How the render target and the bound texture relate: the texture object is bound to the frame buffer (the render target) into which the scene is drawn, as shown below
// If the render target is set to FBO, this effectively means the render texture (FBO) chosen as the render target
// is bound to a camera attachment, and the result appears directly on the bound texture object/attachment
osg::Texture2D* texture = new osg::Texture2D;
camera->setRenderTargetImplementation( osg::Camera::FRAME_BUFFER );
camera->attach( osg::Camera::COLOR_BUFFER, texture );

We can either save the contents of texture as an image file, giving a screenshot of the scene, or bind the texture to some object to get the texture-baking effect. The osgprerender example is a good reference here; running it with the extra arguments --fb, --fbo, --pbuffer, --window, etc. is an easy way to see how the different render target implementations work and how they differ.
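For the screenshot variant, a hedged sketch (the function and variable names here are illustrative; attach() and setRenderTargetImplementation() are the real API) might look like this:

#include <osg/Camera>
#include <osg/Image>

// Attach an osg::Image instead of (or in addition to) a texture, so the render
// stage reads the pixels back into the image after the camera has been drawn.
osg::ref_ptr<osg::Image> setupImageCapture(osg::Camera* camera, int width, int height)
{
    osg::ref_ptr<osg::Image> image = new osg::Image;
    image->allocateImage(width, height, 1, GL_RGBA, GL_UNSIGNED_BYTE);

    camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);
    camera->attach(osg::Camera::COLOR_BUFFER, image.get());

    // after a frame has been rendered, the image can be written out, e.g. with
    // osgDB::writeImageFile(*image, "capture.png");
    return image;
}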

  • The actual textures or images bound to a camera are all stored in the Camera::Attachment struct within the Camera class

RenderStage::runCameraSetUp repeatedly iterates over the Camera::BufferAttachmentMap, looking up and configuring the Attachment objects that correspond to the colour buffer (COLOR_BUFFER), depth buffer (DEPTH_BUFFER) and so on.
Texture details are discussed in a separate reference.

  • In osgUtil::RenderStage:
//osgUtil::RenderStage
std::map< osg::Camera::BufferComponent, Attachment> _bufferAttachmentMap;

enum BufferComponent
        {
            DEPTH_BUFFER,
            STENCIL_BUFFER,
            PACKED_DEPTH_STENCIL_BUFFER,
            COLOR_BUFFER,
            COLOR_BUFFER0,
            COLOR_BUFFER1 = COLOR_BUFFER0+1,
            COLOR_BUFFER2 = COLOR_BUFFER0+2,
            COLOR_BUFFER3 = COLOR_BUFFER0+3,
            COLOR_BUFFER4 = COLOR_BUFFER0+4,
            COLOR_BUFFER5 = COLOR_BUFFER0+5,
            COLOR_BUFFER6 = COLOR_BUFFER0+6,
            COLOR_BUFFER7 = COLOR_BUFFER0+7,
            COLOR_BUFFER8 = COLOR_BUFFER0+8,
            COLOR_BUFFER9 = COLOR_BUFFER0+9,
            COLOR_BUFFER10 = COLOR_BUFFER0+10,
            COLOR_BUFFER11 = COLOR_BUFFER0+11,
            COLOR_BUFFER12 = COLOR_BUFFER0+12,
            COLOR_BUFFER13 = COLOR_BUFFER0+13,
            COLOR_BUFFER14 = COLOR_BUFFER0+14,
            COLOR_BUFFER15 = COLOR_BUFFER0+15
        };
        
struct Attachment
        {
            osg::ref_ptr<osg::Image>                _image;
            GLenum                                  _imageReadPixelFormat;
            GLenum                                  _imageReadPixelDataType;
        };

  • In osg::Camera:
//osg::Camera
typedef std::map< BufferComponent, Attachment> BufferAttachmentMap;
BufferAttachmentMap	_bufferAttachmentMap; // member of Camera
struct Attachment
        {
            Attachment():
                _internalFormat(GL_NONE),
                _level(0),
                _face(0),
                _mipMapGeneration(false),
                _multisampleSamples(0),
                _multisampleColorSamples(0) {}

            int width() const
            {
                if (_texture.valid()) return _texture->getTextureWidth();
                if (_image.valid()) return _image->s();
                return 0;
            };

            int height() const
            {
                if (_texture.valid()) return _texture->getTextureHeight();
                if (_image.valid()) return _image->t();
                return 0;
            };

            int depth() const
            {
                if (_texture.valid()) return _texture->getTextureDepth();
                if (_image.valid()) return _image->r();
                return 0;
            };

            GLenum              _internalFormat;
            ref_ptr<Image>      _image;
            ref_ptr<Texture>    _texture;
            unsigned int        _level;
            unsigned int        _face;
            bool                _mipMapGeneration;
            unsigned int        _multisampleSamples;
            unsigned int        _multisampleColorSamples;
        };

void Camera::attach(BufferComponent buffer, GLenum internalFormat)
{
    switch(buffer)
    {
    case DEPTH_BUFFER:
        if(_bufferAttachmentMap.find(PACKED_DEPTH_STENCIL_BUFFER) != _bufferAttachmentMap.end())
        {
            OSG_WARN << "Camera: DEPTH_BUFFER already attached as PACKED_DEPTH_STENCIL_BUFFER !" << std::endl;
        }
        break;

    case STENCIL_BUFFER:
        if(_bufferAttachmentMap.find(PACKED_DEPTH_STENCIL_BUFFER) != _bufferAttachmentMap.end())
        {
            OSG_WARN << "Camera: STENCIL_BUFFER already attached as PACKED_DEPTH_STENCIL_BUFFER !" << std::endl;
        }
        break;

    case PACKED_DEPTH_STENCIL_BUFFER:
        if(_bufferAttachmentMap.find(DEPTH_BUFFER) != _bufferAttachmentMap.end())
        {
            OSG_WARN << "Camera: DEPTH_BUFFER already attached !" << std::endl;
        }
        if(_bufferAttachmentMap.find(STENCIL_BUFFER) != _bufferAttachmentMap.end())
        {
            OSG_WARN << "Camera: STENCIL_BUFFER already attached !" << std::endl;
        }
        break;
    default:
        break;
    }
    _bufferAttachmentMap[buffer]._internalFormat = internalFormat;
}

void Camera::attach(BufferComponent buffer, osg::Texture* texture, unsigned int level, unsigned int face, bool mipMapGeneration,
                    unsigned int multisampleSamples,
                    unsigned int multisampleColorSamples)
{
    _bufferAttachmentMap[buffer]._texture = texture;
    _bufferAttachmentMap[buffer]._level = level;
    _bufferAttachmentMap[buffer]._face = face;
    _bufferAttachmentMap[buffer]._mipMapGeneration = mipMapGeneration;
    _bufferAttachmentMap[buffer]._multisampleSamples = multisampleSamples;
    _bufferAttachmentMap[buffer]._multisampleColorSamples = multisampleColorSamples;
}
  • RenderStage::runCameraSetUp:

void RenderStage::runCameraSetUp(osg::RenderInfo& renderInfo)
{
    _cameraRequiresSetUp = false;

    if (!_camera) return;

    OSG_INFO<<"RenderStage::runCameraSetUp(osg::RenderInfo& renderInfo) "<<this<<std::endl;

    _cameraAttachmentMapModifiedCount = _camera->getAttachmentMapModifiedCount();

    osg::State& state = *renderInfo.getState();

    osg::Camera::RenderTargetImplementation renderTargetImplementation = _camera->getRenderTargetImplementation();
    osg::Camera::RenderTargetImplementation renderTargetFallback = _camera->getRenderTargetFallback();

    osg::Camera::BufferAttachmentMap& bufferAttachments = _camera->getBufferAttachmentMap();

    _bufferAttachmentMap.clear();

    // compute the required dimensions
    int width = static_cast<int>(_viewport->x() + _viewport->width());
    int height = static_cast<int>(_viewport->y() + _viewport->height());
    int depth = 1;
    for(osg::Camera::BufferAttachmentMap::iterator itr = bufferAttachments.begin();
        itr != bufferAttachments.end();
        ++itr)
    {
        width = osg::maximum(width,itr->second.width());
        height = osg::maximum(height,itr->second.height());
        depth = osg::maximum(depth,itr->second.depth());
    }

    // OSG_NOTICE<<"RenderStage::runCameraSetUp viewport "<<_viewport->x()<<" "<<_viewport->y()<<" "<<_viewport->width()<<" "<<_viewport->height()<<std::endl;
    // OSG_NOTICE<<"RenderStage::runCameraSetUp computed "<<width<<" "<<height<<" "<<depth<<std::endl;

    // attach images that need to be copied after the stage is drawn.
    for(osg::Camera::BufferAttachmentMap::iterator itr = bufferAttachments.begin();
        itr != bufferAttachments.end();
        ++itr)
    {
        // if one exist attach image to the RenderStage.
        if (itr->second._image.valid())
        {
            osg::Image* image = itr->second._image.get();
            GLenum pixelFormat = image->getPixelFormat();
            GLenum dataType = image->getDataType();

            if (image->data()==0)
            {
                if (pixelFormat==0) pixelFormat = itr->second._internalFormat;
                if (pixelFormat==0) pixelFormat = _imageReadPixelFormat;
                if (pixelFormat==0) pixelFormat = GL_RGBA;

                if (dataType==0) dataType = _imageReadPixelDataType;
                if (dataType==0) dataType = GL_UNSIGNED_BYTE;
            }

            _bufferAttachmentMap[itr->first]._imageReadPixelFormat = pixelFormat;
            _bufferAttachmentMap[itr->first]._imageReadPixelDataType = dataType;
            _bufferAttachmentMap[itr->first]._image = image;
        }

        if (itr->second._texture.valid())
        {
            osg::Texture* texture = itr->second._texture.get();
            osg::Texture1D* texture1D = 0;
            osg::Texture2D* texture2D = 0;
            osg::Texture2DMultisample* texture2DMS = 0;
            osg::Texture3D* texture3D = 0;
            osg::TextureCubeMap* textureCubeMap = 0;
            osg::TextureRectangle* textureRectangle = 0;
            if (0 != (texture1D=dynamic_cast<osg::Texture1D*>(texture)))
            {
                if (texture1D->getTextureWidth()==0)
                {
                    texture1D->setTextureWidth(width);
                }
            }
            else if (0 != (texture2D = dynamic_cast<osg::Texture2D*>(texture)))
            {
                if (texture2D->getTextureWidth()==0 || texture2D->getTextureHeight()==0)
                {
                    texture2D->setTextureSize(width,height);
                }
            }
            else if (0 != (texture2DMS = dynamic_cast<osg::Texture2DMultisample*>(texture)))
            {
                if (texture2DMS->getTextureWidth()==0 || texture2DMS->getTextureHeight()==0)
                {
                    texture2DMS->setTextureSize(width,height);
                }
            }
            else if (0 != (texture3D = dynamic_cast<osg::Texture3D*>(texture)))
            {
                if (texture3D->getTextureWidth()==0 || texture3D->getTextureHeight()==0 || texture3D->getTextureDepth()==0 )
                {
                    // note we dont' have the depth here, so we'll heave to assume that height and depth are the same..
                    texture3D->setTextureSize(width,height,height);
                }
            }
            else if (0 != (textureCubeMap = dynamic_cast<osg::TextureCubeMap*>(texture)))
            {
                if (textureCubeMap->getTextureWidth()==0 || textureCubeMap->getTextureHeight()==0)
                {
                    textureCubeMap->setTextureSize(width,height);
                }
            }
            else if (0 != (textureRectangle = dynamic_cast<osg::TextureRectangle*>(texture)))
            {
                if (textureRectangle->getTextureWidth()==0 || textureRectangle->getTextureHeight()==0)
                {
                    textureRectangle->setTextureSize(width,height);
                }
            }

        }
    }

	if (renderTargetImplementation==osg::Camera::FRAME_BUFFER_OBJECT)
    {
        osg::GLExtensions* ext = state.get<osg::GLExtensions>();
        bool fbo_supported = ext->isFrameBufferObjectSupported;

        if (fbo_supported)
        {
            OSG_INFO<<"Setting up osg::Camera::FRAME_BUFFER_OBJECT"<<std::endl;

            OpenThreads::ScopedLock<OpenThreads::Mutex> lock(*(_camera->getDataChangeMutex()));

            osg::ref_ptr<osg::FrameBufferObject> fbo = new osg::FrameBufferObject;
            osg::ref_ptr<osg::FrameBufferObject> fbo_multisample;

            bool colorAttached = false;
            bool depthAttached = false;
            bool stencilAttached = false;
            unsigned samples = 0;
            unsigned colorSamples = 0;

            // This is not a cut and paste error. Set BOTH local masks
            // to the value of the Camera's use render buffers mask.
            // We'll change this if and only if we decide we're doing MSFBO.
            unsigned int renderBuffersMask = _camera->getImplicitBufferAttachmentRenderMask(true);
            unsigned int resolveBuffersMask = _camera->getImplicitBufferAttachmentRenderMask(true);

            if (ext->isRenderbufferMultisampleSupported())
            {
                for(osg::Camera::BufferAttachmentMap::iterator itr = bufferAttachments.begin();
                    itr != bufferAttachments.end();
                    ++itr)
                {
                    osg::Camera::Attachment& attachment = itr->second;
                    samples = maximum(samples, attachment._multisampleSamples);
                    colorSamples = maximum(colorSamples, attachment._multisampleColorSamples);
                }

                if (colorSamples > samples)
                {
                    OSG_NOTIFY(WARN) << "Multisample color samples must be less than or "
                        "equal to samples. Setting color samples equal to samples." << std::endl;
                    colorSamples = samples;
                }

                if (samples)
                {
                    fbo_multisample = new osg::FrameBufferObject;

                    // Use the value of the Camera's use resolve buffers mask as the
                    // resolve mask.
                    // can be user-defined, or obtained from the camera's DisplaySettings (camera._displaySettings)
                    resolveBuffersMask = _camera->getImplicitBufferAttachmentResolveMask(true);
                }
            }

            for(osg::Camera::BufferAttachmentMap::iterator itr = bufferAttachments.begin();
                itr != bufferAttachments.end();
                ++itr)
            {

                osg::Camera::BufferComponent buffer = itr->first;
                osg::Camera::Attachment& attachment = itr->second;
				
				// attach the various buffer components to the frame buffer object
                if (attachment._texture.valid() || attachment._image.valid())
                    fbo->setAttachment(buffer, osg::FrameBufferAttachment(attachment));
                // or render into a RenderBuffer first
                else
                    fbo->setAttachment(buffer, osg::FrameBufferAttachment(new osg::RenderBuffer(width, height, attachment._internalFormat)));

                if (fbo_multisample.valid())
                {
                    GLenum internalFormat = attachment._internalFormat;
                    if (!internalFormat)
                    {
                        switch (buffer)
                        {
                        case Camera::DEPTH_BUFFER:
                            internalFormat = GL_DEPTH_COMPONENT24;
                            break;
                        case Camera::STENCIL_BUFFER:
                            internalFormat = GL_STENCIL_INDEX8_EXT;
                            break;
                        case Camera::PACKED_DEPTH_STENCIL_BUFFER:
                            internalFormat = GL_DEPTH_STENCIL_EXT;
                            break;

                        // all other buffers are color buffers
                        default:
                            // setup the internal format based on attached texture if such exists, otherwise just default format
                            if (attachment._texture)
                                internalFormat = attachment._texture->getInternalFormat();
                            else
                                internalFormat = GL_RGBA;
                            break;
                        }
                    }
                    fbo_multisample->setAttachment(buffer,
                        osg::FrameBufferAttachment(new osg::RenderBuffer(
                        width, height, internalFormat,
                        samples, colorSamples)));
                }

                if (buffer==osg::Camera::DEPTH_BUFFER) depthAttached = true;
                else if (buffer==osg::Camera::STENCIL_BUFFER) stencilAttached = true;
                else if (buffer==osg::Camera::PACKED_DEPTH_STENCIL_BUFFER)
                {
                    depthAttached = true;
                    stencilAttached = true;
                }
                else if (buffer>=osg::Camera::COLOR_BUFFER) colorAttached = true;

            }

            if (!depthAttached)
            {
                // If doing MSFBO (and therefore need two FBOs, one for multisampled rendering and one for
                // final resolve), then configure "fbo" as the resolve FBO, and When done
                // configuring, swap it into "_resolveFbo" (see line 554). But, if not
                // using MSFBO, then "fbo" is just the render fbo.
                // If using MSFBO, then resolveBuffersMask
                // is the value set by the app for the resolve buffers. But if not using
                // MSFBO, then resolveBuffersMask is the value set by the app for render
                // buffers. In both cases, resolveBuffersMask is used to configure "fbo".
                if( resolveBuffersMask & osg::Camera::IMPLICIT_DEPTH_BUFFER_ATTACHMENT )
                {
                    fbo->setAttachment(osg::Camera::DEPTH_BUFFER, osg::FrameBufferAttachment(new osg::RenderBuffer(width, height, GL_DEPTH_COMPONENT24)));
                    depthAttached = true;
                }
                if (fbo_multisample.valid() &&
                    ( renderBuffersMask & osg::Camera::IMPLICIT_DEPTH_BUFFER_ATTACHMENT ) )
                {
                    fbo_multisample->setAttachment(osg::Camera::DEPTH_BUFFER,
                        osg::FrameBufferAttachment(new osg::RenderBuffer(width,
                        height, GL_DEPTH_COMPONENT24, samples, colorSamples)));
                }
            }
            if (!stencilAttached)
            {
                if( resolveBuffersMask & osg::Camera::IMPLICIT_STENCIL_BUFFER_ATTACHMENT )
                {
                    fbo->setAttachment(osg::Camera::STENCIL_BUFFER, osg::FrameBufferAttachment(new osg::RenderBuffer(width, height, GL_STENCIL_INDEX8_EXT)));
                    stencilAttached = true;
                }
                if (fbo_multisample.valid() &&
                    ( renderBuffersMask & osg::Camera::IMPLICIT_STENCIL_BUFFER_ATTACHMENT ) )
                {
                    fbo_multisample->setAttachment(osg::Camera::STENCIL_BUFFER,
                        osg::FrameBufferAttachment(new osg::RenderBuffer(width,
                        height, GL_STENCIL_INDEX8_EXT, samples, colorSamples)));
                }
            }

            if (!colorAttached)
            {
                if( resolveBuffersMask & osg::Camera::IMPLICIT_COLOR_BUFFER_ATTACHMENT )
                {
                    fbo->setAttachment(osg::Camera::COLOR_BUFFER, osg::FrameBufferAttachment(new osg::RenderBuffer(width, height, GL_RGB)));
                    colorAttached = true;
                }
                if (fbo_multisample.valid() &&
                    ( renderBuffersMask & osg::Camera::IMPLICIT_COLOR_BUFFER_ATTACHMENT ) )
                {
                    fbo_multisample->setAttachment(osg::Camera::COLOR_BUFFER,
                        osg::FrameBufferAttachment(new osg::RenderBuffer(width,
                        height, GL_RGB, samples, colorSamples)));
                }
            }
		
			// internally uses GLExtensions::glBindFramebuffer
            fbo->apply(state);

            // If no color attachment make sure to set glDrawBuffer/glReadBuffer to none
            // otherwise glCheckFramebufferStatus will fail
            // It has to be done after call to glBindFramebuffer (fbo->apply)
            // and before call to glCheckFramebufferStatus
            if ( !colorAttached )
            {
            #if !defined(OSG_GLES1_AVAILABLE) && !defined(OSG_GLES2_AVAILABLE) && !defined(OSG_GLES3_AVAILABLE)
                setDrawBuffer( GL_NONE, true );
                state.glDrawBuffer( GL_NONE );
            #endif
            }

            GLenum status = ext->glCheckFramebufferStatus(GL_FRAMEBUFFER_EXT);

            if (status != GL_FRAMEBUFFER_COMPLETE_EXT)
            {
                OSG_NOTICE<<"RenderStage::runCameraSetUp(), FBO setup failed, FBO status= 0x"<<std::hex<<status<<std::dec<<std::endl;

                fbo_supported = false;
                GLuint fboId = state.getGraphicsContext() ? state.getGraphicsContext()->getDefaultFboId() : 0;
                ext->glBindFramebuffer(GL_FRAMEBUFFER_EXT, fboId);
                fbo = 0;

                // clean up.
                osg::get<osg::GLRenderBufferManager>(state.getContextID())->flushAllDeletedGLObjects();
                osg::get<osg::GLFrameBufferObjectManager>(state.getContextID())->flushAllDeletedGLObjects();
            }
            else
            {
                setDrawBuffer(GL_NONE, false );
                setReadBuffer(GL_NONE, false );

                _fbo = fbo;

                if (fbo_multisample.valid())
                {
                    fbo_multisample->apply(state);

                    status = ext->glCheckFramebufferStatus(GL_FRAMEBUFFER_EXT);
                    if (status != GL_FRAMEBUFFER_COMPLETE_EXT)
                    {
                        OSG_NOTICE << "RenderStage::runCameraSetUp(), "
                            "multisample FBO setup failed, FBO status = 0x"
                            << std::hex << status << std::dec << std::endl;

                        fbo->apply(state);
                        fbo_multisample = 0;
                        _resolveFbo = 0;

                        // clean up.
                        osg::get<osg::GLRenderBufferManager>(state.getContextID())->flushAllDeletedGLObjects();
                        osg::get<osg::GLFrameBufferObjectManager>(state.getContextID())->flushAllDeletedGLObjects();
                    }
                    else
                    {
                        _resolveFbo.swap(_fbo);
                        _fbo = fbo_multisample;
                    }
                }
                else
                {
                    _resolveFbo = 0;
                }
            }
        }

        if (!fbo_supported)
        {
            if (renderTargetImplementation<renderTargetFallback)
                renderTargetImplementation = renderTargetFallback;
            else
                renderTargetImplementation = osg::Camera::PIXEL_BUFFER_RTT;
        }
    }



}
  • In osg's FrameBufferObject.cpp:
void FrameBufferObject::apply(State &state) const
{
    apply(state, READ_DRAW_FRAMEBUFFER);
}

void FrameBufferObject::apply(State &state, BindTarget target) const
{
    unsigned int contextID = state.getContextID();

    if (_unsupported[contextID])
        return;


    GLExtensions* ext = state.get<GLExtensions>();
    if (!ext->isFrameBufferObjectSupported)
    {
        _unsupported[contextID] = 1;
        OSG_WARN << "Warning: EXT_framebuffer_object is not supported" << std::endl;
        return;
    }

    if (_attachments.empty())
    {
        ext->glBindFramebuffer(target, 0); // bind the default framebuffer
        return;
    }

    int &dirtyAttachmentList = _dirtyAttachmentList[contextID];

    GLuint &fboID = _fboID[contextID];
    if (fboID == 0)
    {
        ext->glGenFramebuffers(1, &fboID);
        if (fboID == 0)
        {
            OSG_WARN << "Warning: FrameBufferObject: could not create the FBO" << std::endl;
            return;
        }

        dirtyAttachmentList = 1;

    }

    if (dirtyAttachmentList)
    {
        // the set of of attachments appears to be thread sensitive, it shouldn't be because
        // OpenGL FBO handles osg::FrameBufferObject has are multi-buffered...
        // so as a temporary fix will stick in a mutex to ensure that only one thread passes through here
        // at one time.
        // make sure only one thread passes through here at a time
        static OpenThreads::Mutex s_mutex;
        OpenThreads::ScopedLock<OpenThreads::Mutex> lock(s_mutex);

        // create textures and mipmaps before we bind the frame buffer object
        for (AttachmentMap::const_iterator i=_attachments.begin(); i!=_attachments.end(); ++i)
        {
            const FrameBufferAttachment &fa = i->second;
            //FrameBufferObject.cpp::FrameBufferAttachment::createRequiredTexturesAndApplyGenerateMipMap
            fa.createRequiredTexturesAndApplyGenerateMipMap(state, ext);
        }

    }


    ext->glBindFramebuffer(target, fboID);

    // enable drawing buffers to render the result to fbo
    if ( (target == READ_DRAW_FRAMEBUFFER) || (target == DRAW_FRAMEBUFFER) )
    {
    	//std::vector<GLenum> _drawBuffers;
        if (_drawBuffers.size() > 0)
        {
            GLExtensions *gl2e = state.get<GLExtensions>();
            if (gl2e && gl2e->glDrawBuffers)
            {
                gl2e->glDrawBuffers(_drawBuffers.size(), &(_drawBuffers[0]));
            }
            else
            {
                OSG_WARN <<"Warning: FrameBufferObject: could not set draw buffers, glDrawBuffers is not supported!" << std::endl;
            }
        }
    }

    if (dirtyAttachmentList)
    {
        for (AttachmentMap::const_iterator i=_attachments.begin(); i!=_attachments.end(); ++i)
        {
            const FrameBufferAttachment &fa = i->second;
            switch(i->first)
            {
                case(Camera::PACKED_DEPTH_STENCIL_BUFFER):
                    if (ext->isPackedDepthStencilSupported)
                    {
                        fa.attach(state, target, GL_DEPTH_ATTACHMENT_EXT, ext);
                        fa.attach(state, target, GL_STENCIL_ATTACHMENT_EXT, ext);
                    }
                    else
                    {
                        OSG_WARN <<
                            "Warning: FrameBufferObject: could not attach PACKED_DEPTH_STENCIL_BUFFER, "
                            "EXT_packed_depth_stencil is not supported!" << std::endl;
                    }
                    break;

                default:
                    fa.attach(state, target, convertBufferComponentToGLenum(i->first), ext);
                    break;
            }
        }
        dirtyAttachmentList = 0;
    }

}
GLenum FrameBufferObject::convertBufferComponentToGLenum(BufferComponent attachment_point) const
{
    switch(attachment_point)
    {
        case(Camera::DEPTH_BUFFER): return GL_DEPTH_ATTACHMENT_EXT;
        case(Camera::STENCIL_BUFFER): return GL_STENCIL_ATTACHMENT_EXT;
        case(Camera::COLOR_BUFFER): return GL_COLOR_ATTACHMENT0_EXT;
        default: return GLenum(GL_COLOR_ATTACHMENT0_EXT + (attachment_point-Camera::COLOR_BUFFER0));
    }
}
osgUtil::RenderStage::copyTexture

RenderStage::copyTexture, for its part, handles the FRAME_BUFFER render target: it copies the rendered scene image into the Attachment objects (the actual textures or images bound to the camera).

osgUtil::RenderStage::drawInner
part1 Initialization of the FBO
void RenderStage::drawInner(osg::RenderInfo& renderInfo,RenderLeaf*& previous, bool& doCopyTexture)
{
    struct SubFunc
    {
        static void applyReadFBO(bool& apply_read_fbo,
            const FrameBufferObject* read_fbo, osg::State& state)
        {
            if (read_fbo->isMultisample())
            {
                OSG_WARN << "Attempting to read from a"
                    " multisampled framebuffer object. Set a resolve"
                    " framebuffer on the RenderStage to fix this." << std::endl;
            }

            if (apply_read_fbo)
            {
                // Bind the monosampled FBO to read from
                read_fbo->apply(state, FrameBufferObject::READ_FRAMEBUFFER);
                apply_read_fbo = false;
            }
        }
    };

    osg::State& state = *renderInfo.getState();

	// initialization of the FBO
    osg::GLExtensions* ext = _fbo.valid() ? state.get<osg::GLExtensions>() : 0;
    // check whether the graphics card supports the FBO and MRT (multiple render targets) extensions
    bool fbo_supported = ext && ext->isFrameBufferObjectSupported;

    bool using_multiple_render_targets = fbo_supported && _fbo->hasMultipleRenderingTargets();

    if (!using_multiple_render_targets)
    {
    // At this point the render stage (RenderStage), as the root of the rendering tree, is also responsible for setting
    // the draw and read buffers with glDrawBuffer and glReadBuffer (at the user level this is done with
    // Camera::setDrawBuffer and Camera::setReadBuffer). When both buffers are set to GL_BACK, the scene is drawn into
    // the back buffer, and a SwapBuffers action can then swap the front and back buffers to avoid flicker during drawing.
        #if !defined(OSG_GLES1_AVAILABLE) && !defined(OSG_GLES2_AVAILABLE) && !defined(OSG_GLES3_AVAILABLE)

            if( getDrawBufferApplyMask() )
                state.glDrawBuffer(_drawBuffer);

            if( getReadBufferApplyMask() )
                state.glReadBuffer(_readBuffer);

        #endif
    }

    if (fbo_supported)
    {
        _fbo->apply(state);//glBindFramebuffer
    }

    // do the drawing itself.
    RenderBin::draw(renderInfo,previous);
...
}
part2 Executing osgUtil::RenderBin::draw -> drawImplementation

This is responsible for traversing the rendering tree from its root node and executing the contents of each render leaf (RenderLeaf) and of the state nodes (StateGraph) above them.

The job of RenderBin::draw is simply to call RenderBin::drawImplementation; of course the user can also replace drawImplementation with a custom draw callback (RenderBin::setDrawCallback) to carry out the drawing, as sketched after the code below.

void RenderBin::draw(osg::RenderInfo& renderInfo,RenderLeaf*& previous)
{
    renderInfo.pushRenderBin(this);

    if (_drawCallback.valid())
    {
        _drawCallback->drawImplementation(this,renderInfo,previous);
    }
    else drawImplementation(renderInfo,previous);

    renderInfo.popRenderBin();
}
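As a minimal sketch of the custom draw callback route mentioned above (the class name is illustrative; RenderBin::DrawCallback, setDrawCallback and drawImplementation are the real API):

#include <osg/Notify>
#include <osgUtil/RenderBin>
#include <osgUtil/RenderLeaf>

// Logs the bin and then falls back to the stock drawImplementation().
struct LoggingBinCallback : public osgUtil::RenderBin::DrawCallback
{
    virtual void drawImplementation(osgUtil::RenderBin* bin,
                                    osg::RenderInfo& renderInfo,
                                    osgUtil::RenderLeaf*& previous)
    {
        osg::notify(osg::NOTICE) << "drawing render bin " << bin->getBinNum() << std::endl;
        bin->drawImplementation(renderInfo, previous); // default behaviour
    }
};

// Installed on a RenderBin of interest, e.g. one obtained from a CullVisitor
// during the cull traversal:
//     renderBin->setDrawCallback(new LoggingBinCallback);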

Several important responsibilities of the osg::State class:
(1) it stores all OpenGL state, modes, attribute parameters, and vertex and index data;
(2) it provides a stack-based mechanism for handling OpenGL state, optimising the data that is about to enter the rendering pipeline;
(3) it allows the user to query the current value of the various OpenGL states directly.

  • The second point, the handling of the OpenGL state stack, is in practice the traversal of the OSG state tree (StateGraph). Switching OpenGL modes on and off (the familiar glEnable and glDisable) is done through State::applyMode;
  • vertex coordinates, normals and the various vertex and index arrays (glVertexPointer, glNormalPointer, etc.) are likewise set by State member functions such as setVertexPointer. The OpenGL handling of the many different rendering attributes is done by State::applyAttribute, which in turn calls StateAttribute::apply(State&) on the individual attribute objects to realise the various rendering effects. A small user-level sketch follows this list.
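A small user-level sketch of what this boils down to (assuming a fixed-function/compatibility OpenGL build, where GL_LIGHTING is meaningful): modes set on a StateSet end up as lazy glEnable/glDisable calls issued by State::applyMode, while attributes end up as StateAttribute::apply(State&) calls issued by State::applyAttribute.

#include <osg/BlendFunc>
#include <osg/Node>
#include <osg/StateSet>

void configureStateSet(osg::Node* node)
{
    osg::StateSet* ss = node->getOrCreateStateSet();

    // State::applyMode() will eventually translate this into glDisable(GL_LIGHTING),
    // but only when the mode actually changes between consecutive render leaves.
    ss->setMode(GL_LIGHTING, osg::StateAttribute::OFF);

    // State::applyAttribute() will call BlendFunc::apply(State&), which issues glBlendFunc;
    // setAttributeAndModes() also switches GL_BLEND on through the mode mechanism.
    ss->setAttributeAndModes(new osg::BlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA),
                             osg::StateAttribute::ON);
}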

This shows that osg::State is the main interface between OSG and OpenGL: it is the traverser and consolidator of the scene's state tree, and the path through which all rendering state and vertex data are processed.

  • We already know, however, that OSG's vertex coordinates and index data are stored by osg::Geometry; what passes the Geometry data on to the State object is the leaf node of the rendering tree, the RenderLeaf:
    it calls the Drawable::draw function of the Drawable (geometry) object it holds, which performs the actual drawing; during drawing, the Geometry hands the data it stores over to the State object, which takes care of loading and processing the vertices.

The role of the rendering tree in all this is to extract the render leaves (RenderLeaf) from each rendering tree node (RenderBin) and hand them to osg::State, which consolidates all the rendering state they inherit through the state tree and passes the geometry data on to the OpenGL pipeline, completing the drawing.
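To make the Drawable::draw() path concrete, here is a hedged sketch of a custom Drawable in the style of the osgteapot example (class and library names are illustrative; it assumes a fixed-function/compatibility OpenGL build since it uses glBegin/glEnd):

#include <osg/Drawable>
#include <osg/GL>
#include <osg/RenderInfo>

// The RenderLeaf ultimately ends up in drawImplementation(); by this point osg::State
// has already applied the StateSet stack inherited through the state tree.
class RawTriangle : public osg::Drawable
{
public:
    RawTriangle() {}
    RawTriangle(const RawTriangle& copy, const osg::CopyOp& copyop = osg::CopyOp::SHALLOW_COPY)
        : osg::Drawable(copy, copyop) {}
    META_Object(blogExample, RawTriangle)

    virtual void drawImplementation(osg::RenderInfo& /*renderInfo*/) const
    {
        glBegin(GL_TRIANGLES);
        glVertex3f(0.0f, 0.0f, 0.0f);
        glVertex3f(1.0f, 0.0f, 0.0f);
        glVertex3f(0.0f, 0.0f, 1.0f);
        glEnd();
    }
};

// Added to the scene like any other drawable (e.g. geode->addDrawable(new RawTriangle));
// normally a bounding box should also be supplied so the drawable is not culled away.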

osgUtil::RenderBin::drawImplementation() is covered in detail separately.

part3 Detect and report errors encountered while drawing the scene
void RenderStage::drawInner(osg::RenderInfo& renderInfo,RenderLeaf*& previous, bool& doCopyTexture)
{
...
	if(state.getCheckForGLErrors()!=osg::State::NEVER_CHECK_GL_ERRORS)
    {
        if (state.checkGLErrors("after RenderBin::draw(..)"))
        {
            if ( ext )
            {
                GLenum fbstatus = ext->glCheckFramebufferStatus(GL_FRAMEBUFFER_EXT);
                if ( fbstatus != GL_FRAMEBUFFER_COMPLETE_EXT )
                {
                    OSG_NOTICE<<"RenderStage::drawInner(,) FBO status = 0x"<<std::hex<<fbstatus<<std::dec<<std::endl;
                }
            }
        }
    }
...
}
part4 Resolving with glBlitFramebuffer and copying the result into the attached texture and image objects (supplement)
  • Note in particular: if you want to use an FBO for texture baking or for capturing the scene, you must not set the main scene camera's setRenderTargetImplementation to the corresponding enum value directly; if you do, the scene will no longer be displayed normally (because the render stage of the main camera has bound the result of scene drawing to the FBO).
  • The correct approach is to add an extra Camera node to the scene graph, set its render target implementation to FBO, and control its rendering order with Camera::setRenderOrder; setting it to PRE_RENDER guarantees that this camera draws before the main scene (it creates a "pre-render" render stage, stored in the RenderStage::_preRenderList list), which gives the render-to-texture effect. See the osgprerender example, the CullVisitor::apply(Camera&) discussion from Day 22, and the sketch below.
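A hedged sketch of this recommended set-up (names are illustrative; compare the osgprerender example) is given below; the essential points are the PRE_RENDER order and the FRAME_BUFFER_OBJECT render target:

#include <osg/Camera>
#include <osg/Texture2D>

osg::ref_ptr<osg::Camera> createRttCamera(osg::Node* subScene, osg::Texture2D* texture,
                                          int width, int height)
{
    texture->setTextureSize(width, height);
    texture->setInternalFormat(GL_RGBA);

    osg::ref_ptr<osg::Camera> camera = new osg::Camera;
    camera->setClearColor(osg::Vec4(0.1f, 0.1f, 0.3f, 1.0f));
    camera->setClearMask(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // draw into an FBO before the main scene, with a view independent of the parent camera
    camera->setReferenceFrame(osg::Transform::ABSOLUTE_RF);
    camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);
    camera->setRenderOrder(osg::Camera::PRE_RENDER);
    camera->setViewport(0, 0, width, height);

    // bind the colour buffer of this camera's render stage to the texture
    camera->attach(osg::Camera::COLOR_BUFFER, texture);

    camera->addChild(subScene);
    return camera;
}

// The returned camera is added to the scene graph (root->addChild(camera)), and
// 'texture' can then be applied to any StateSet in the main scene.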
void RenderStage::drawInner(osg::RenderInfo& renderInfo,RenderLeaf*& previous, bool& doCopyTexture)
{
...
	 const FrameBufferObject* read_fbo = fbo_supported ? _fbo.get() : 0;
    bool apply_read_fbo = false;

    if (fbo_supported && _resolveFbo.valid() && ext->glBlitFramebuffer)
    {
        GLbitfield blitMask = 0;
        bool needToBlitColorBuffers = false;

        //find which buffer types should be copied
        for (FrameBufferObject::AttachmentMap::const_iterator
            it = _resolveFbo->getAttachmentMap().begin(),
            end =_resolveFbo->getAttachmentMap().end(); it != end; ++it)
        {
            switch (it->first)
            {
            case Camera::DEPTH_BUFFER:
                blitMask |= GL_DEPTH_BUFFER_BIT;
                break;
            case Camera::STENCIL_BUFFER:
                blitMask |= GL_STENCIL_BUFFER_BIT;
                break;
            case Camera::PACKED_DEPTH_STENCIL_BUFFER:
                blitMask |= GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT;
                break;
            case Camera::COLOR_BUFFER:
                blitMask |= GL_COLOR_BUFFER_BIT;
                break;
            default:
                needToBlitColorBuffers = true;
                break;
            }
        }

        // Bind the resolve framebuffer to blit into.
        _fbo->apply(state, FrameBufferObject::READ_FRAMEBUFFER);
        _resolveFbo->apply(state, FrameBufferObject::DRAW_FRAMEBUFFER);

        if (blitMask)
        {
            // Blit to the resolve framebuffer.
            // Note that (with nvidia 175.16 windows drivers at least) if the read
            // framebuffer is multisampled then the dimension arguments are ignored
            // and the whole framebuffer is always copied.
            ext->glBlitFramebuffer(
                static_cast<GLint>(_viewport->x()), static_cast<GLint>(_viewport->y()),
                static_cast<GLint>(_viewport->x() + _viewport->width()), static_cast<GLint>(_viewport->y() + _viewport->height()),
                static_cast<GLint>(_viewport->x()), static_cast<GLint>(_viewport->y()),
                static_cast<GLint>(_viewport->x() + _viewport->width()), static_cast<GLint>(_viewport->y() + _viewport->height()),
                blitMask, GL_NEAREST);
        }

#if !defined(OSG_GLES1_AVAILABLE) && !defined(OSG_GLES2_AVAILABLE) && !defined(OSG_GLES3_AVAILABLE)
        if (needToBlitColorBuffers)
        {
            for (FrameBufferObject::AttachmentMap::const_iterator
                it = _resolveFbo->getAttachmentMap().begin(),
                end =_resolveFbo->getAttachmentMap().end(); it != end; ++it)
            {
                osg::Camera::BufferComponent attachment = it->first;
                if (attachment >=osg::Camera::COLOR_BUFFER0)
                {
                    state.glReadBuffer(GL_COLOR_ATTACHMENT0_EXT + (attachment - osg::Camera::COLOR_BUFFER0));
                    state.glDrawBuffer(GL_COLOR_ATTACHMENT0_EXT + (attachment - osg::Camera::COLOR_BUFFER0));

                    ext->glBlitFramebuffer(
                        static_cast<GLint>(_viewport->x()), static_cast<GLint>(_viewport->y()),
                        static_cast<GLint>(_viewport->x() + _viewport->width()), static_cast<GLint>(_viewport->y() + _viewport->height()),
                        static_cast<GLint>(_viewport->x()), static_cast<GLint>(_viewport->y()),
                        static_cast<GLint>(_viewport->x() + _viewport->width()), static_cast<GLint>(_viewport->y() + _viewport->height()),
                        GL_COLOR_BUFFER_BIT, GL_NEAREST);
                }
            }
            // reset the read and draw buffers?  will comment out for now with the assumption that
            // the buffers will be set explicitly when needed elsewhere.
            // glReadBuffer(GL_COLOR_ATTACHMENT0_EXT);
            // glDrawBuffer(GL_COLOR_ATTACHMENT0_EXT);
        }
#endif

        apply_read_fbo = true;
        read_fbo = _resolveFbo.get();

        using_multiple_render_targets = read_fbo->hasMultipleRenderingTargets();
    }

    // now copy the rendered image to attached texture.
    if (doCopyTexture)
    {
        if (read_fbo) SubFunc::applyReadFBO(apply_read_fbo, read_fbo, state);
        copyTexture(renderInfo);
    }

    for(std::map< osg::Camera::BufferComponent, Attachment>::const_iterator itr = _bufferAttachmentMap.begin();
        itr != _bufferAttachmentMap.end();
        ++itr)
    {
        if (itr->second._image.valid())
        {
            if (read_fbo) SubFunc::applyReadFBO(apply_read_fbo, read_fbo, state);

            #if !defined(OSG_GLES1_AVAILABLE) && !defined(OSG_GLES2_AVAILABLE)

                if (using_multiple_render_targets)
                {
                    int attachment=itr->first;
                    if (attachment==osg::Camera::DEPTH_BUFFER || attachment==osg::Camera::STENCIL_BUFFER) {
                        // assume first buffer rendered to is the one we want
                        glReadBuffer(read_fbo->getMultipleRenderingTargets()[0]);
                    } else {
                        glReadBuffer(GL_COLOR_ATTACHMENT0_EXT + (attachment - osg::Camera::COLOR_BUFFER0));
                    }
                } else {
                    if (_readBuffer != GL_NONE)
                    {
                        glReadBuffer(_readBuffer);
                    }
                }

            #endif

            GLenum pixelFormat = itr->second._image->getPixelFormat();
            if (pixelFormat==0) pixelFormat = _imageReadPixelFormat;
            if (pixelFormat==0) pixelFormat = GL_RGB;

            GLenum dataType = itr->second._image->getDataType();
            if (dataType==0) dataType = _imageReadPixelDataType;
            if (dataType==0) dataType = GL_UNSIGNED_BYTE;

            itr->second._image->readPixels(static_cast<int>(_viewport->x()),
                                           static_cast<int>(_viewport->y()),
                                           static_cast<int>(_viewport->width()),
                                           static_cast<int>(_viewport->height()),
                                           pixelFormat, dataType);
        }
    }

    if (fbo_supported)
    {
        if (getDisableFboAfterRender())
        {
            // switch off the frame buffer object
            GLuint fboId = state.getGraphicsContext() ? state.getGraphicsContext()->getDefaultFboId() : 0;
            ext->glBindFramebuffer(GL_FRAMEBUFFER_EXT, fboId);
        }

        doCopyTexture = true;
    }

    if (fbo_supported && _camera.valid())
    {
        // now generate mipmaps if they are required.
        const osg::Camera::BufferAttachmentMap& bufferAttachments = _camera->getBufferAttachmentMap();
        for(osg::Camera::BufferAttachmentMap::const_iterator itr = bufferAttachments.begin();
            itr != bufferAttachments.end();
            ++itr)
        {
            if (itr->second._texture.valid() && itr->second._mipMapGeneration)
            {
                state.setActiveTextureUnit(0);
                state.applyTextureAttribute(0, itr->second._texture.get());
                ext->glGenerateMipmap(itr->second._texture->getTextureTarget());
            }
        }
    }
}



   