[osg-users] Disappearing text
Ronny Hatteland
ronnyhatteland at gmail.com
Mon Apr 18 08:30:38 PDT 2016
> That should not be needed at all, and is indicative that something somewhere in your application or its usage of the OSG is messing up the GL object management.
>
> The resizeGLObjectBuffers() is only needed when graphics contexts are created or destroyed.
>
> As to what is going wrong in your case I can't say - something *very* odd is happening. Are you creating your own graphics context / modifying the viewer?
>
> Robert.
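As I understand it, resizeGLObjectBuffers() would only be called along these lines when a new context comes online - a minimal sketch of my understanding, where 'scene' and 'traits' are placeholder names rather than variables from my application:
Code:
// Sketch: after bringing a new graphics context online, make sure the
// scene's per-context buffer arrays are large enough for its contextID.
// 'scene' and 'traits' are placeholders, not the application's actual names.
osg::ref_ptr<osg::GraphicsContext> newContext =
    osg::GraphicsContext::createGraphicsContext(traits.get());
if (newContext.valid())
{
    scene->resizeGLObjectBuffers(
        osg::DisplaySettings::instance()->getMaxNumberOfGraphicsContexts());
}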
Yes, I am creating my own GraphicsContext:
Code:
// Local Variable to hold window size data
RECT rect;
// Get the current window size
::GetWindowRect(m_hWnd, &rect);
// Init the GraphicsContext Traits
osg::ref_ptr<osg::GraphicsContext::Traits> traits = new osg::GraphicsContext::Traits;
// Init the Windata Variable that holds the handle for the Window to display OSG in.
osg::ref_ptr<osg::Referenced> windata = new osgViewer::GraphicsWindowWin32::WindowData(m_hWnd);
// Setup the traits parameters
traits->x = 0;
traits->y = 0;
traits->width = (rect.right - rect.left);
traits->height = (rect.bottom - rect.top);
traits->windowDecoration = true; // CHECK DIFFERENCE
traits->doubleBuffer = false;
traits->setInheritedWindowPixelFormat = true;
traits->inheritedWindowData = windata;
traits->samples = samples;
traits->vsync = false;
osg::GraphicsContext* gc = osg::GraphicsContext::createGraphicsContext(traits.get());
// x and y are already 0 from above, so these assignments are redundant
traits->x = 0;
traits->y = 0;
return gc;
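For what it's worth, a quick sanity check on the returned context could look like this (a sketch only, not something the application currently does):
Code:
// Sketch: verify that createGraphicsContext() actually produced a valid
// Win32 graphics window before wiring it into the viewer.
osgViewer::GraphicsWindowWin32* gw =
    dynamic_cast<osgViewer::GraphicsWindowWin32*>(gc);
if (!gc || !gc->valid() || !gw)
{
    OSG_WARN << "Failed to create an inherited Win32 graphics context" << std::endl;
}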
The context is afterwards used as follows:
Code:
m_defaultGraphicsContext->clear();
m_defaultGraphicsContext->setName("DefaultViewGraphicsContext");
m_defaultGraphicsContext->getState()->setUseModelViewAndProjectionUniforms(true); //ADDED FOR SHADER
m_defaultGraphicsContext->getState()->setUseVertexAttributeAliasing(true);
if (m_defaultGraphicsContext->valid())
{
    defaultCamera = mViewer->getCamera();
    defaultCamera->setCullMask(0x04);
    defaultCamera->getOrCreateStateSet()->setMode(GL_LIGHTING, osg::StateAttribute::OFF);
    // setMode() takes a GL mode enum, so use GL_SCISSOR_TEST rather than the osg::StateAttribute::SCISSOR type value
    defaultCamera->getOrCreateStateSet()->setMode(GL_SCISSOR_TEST, osg::StateAttribute::OFF | osg::StateAttribute::PROTECTED);
    defaultCamera->getOrCreateStateSet()->removeAttribute(osg::StateAttribute::MATERIAL);
    defaultCamera->getView()->setLightingMode(osg::View::NO_LIGHT);
    defaultCamera->setGraphicsContext(m_defaultGraphicsContext);
    defaultCamera->setViewport(new osg::Viewport(
        m_defaultGraphicsContext->getTraits()->x,
        m_defaultGraphicsContext->getTraits()->y,
        m_defaultGraphicsContext->getTraits()->width,
        m_defaultGraphicsContext->getTraits()->height));
}
double fovy, aspectRatio;
mViewer->getCamera()->getProjectionMatrixAsPerspective(fovy, aspectRatio, defaultCameraNear, defaultCameraFar);
aspectRatio = double(m_defaultGraphicsContext->getTraits()->width) / double(m_defaultGraphicsContext->getTraits()->height);
mViewer->getCamera()->setProjectionMatrixAsPerspective(fovy, aspectRatio, 0.0000000001f, defaultCameraFar);
initOpenGL(m_defaultGraphicsContext, maxNumUniforms, maxUniformBlockSize);
Code:
void initOpenGL(osg::ref_ptr<osg::GraphicsContext> context, GLint& maxNumUniforms, GLint& maxUniformBlockSize) {
    context->realize();
    context->makeCurrent();
    maxNumUniforms = 0;
    glGetIntegerv(GL_MAX_VERTEX_UNIFORM_COMPONENTS, &maxNumUniforms);
    maxUniformBlockSize = 0;
    glGetIntegerv(GL_MAX_UNIFORM_BLOCK_SIZE, &maxUniformBlockSize);
    // init glew
    //glewExperimental = GL_TRUE;
    //if (GLEW_ARB_vertex_buffer_object == FALSE || GLEW_ARB_vertex_array_object == FALSE) return;
#ifdef USE_INSTANCED_DRAWING
    glewInit();
#endif
    context->releaseContext();
    // ATI driver 11.6 didn't return the right number of uniforms, which led to a crash when the vertex shader was compiled (WTF?!)
#ifdef ATI_FIX
    maxNumUniforms = 576;
    maxUniformBlockSize = 16384;
#endif
}
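One alternative I have been weighing (a sketch only, not what the code above does) is to query the limits from a realize operation, so that the viewer owns the realize/makeCurrent/releaseContext cycle instead of the application doing it by hand:
Code:
// Sketch: query GL limits from a realize operation instead of calling
// realize()/makeCurrent()/releaseContext() manually. The viewer runs this
// once per graphics context, with that context already current.
struct QueryLimitsOperation : public osg::Operation
{
    QueryLimitsOperation() : osg::Operation("QueryLimits", false) {}

    virtual void operator()(osg::Object* object)
    {
        osg::GraphicsContext* context = dynamic_cast<osg::GraphicsContext*>(object);
        if (!context) return;

        GLint maxNumUniforms = 0;
        glGetIntegerv(GL_MAX_VERTEX_UNIFORM_COMPONENTS, &maxNumUniforms);
        GLint maxUniformBlockSize = 0;
        glGetIntegerv(GL_MAX_UNIFORM_BLOCK_SIZE, &maxUniformBlockSize);
        // ... store the values somewhere the application can read them later
    }
};
// Usage (before the viewer is realized):
// mViewer->setRealizeOperation(new QueryLimitsOperation);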
Maybe you can spot something that is clearly wrong here; otherwise it could be related to glewInit()?
------------------
Read this topic online here:
http://forum.openscenegraph.org/viewtopic.php?p=66864#66864
More information about the osg-users mailing list