[osg-users] [GL ES2.0] Render to texture

Jean Baptiste Poquelin coudboule at yahoo.fr
Wed Apr 15 03:04:02 PDT 2015


Hi,

I am trying to render a scene into a texture using OpenGL ES 2.0, in the particular context of a server-side renderer that uses a DRM render node and GBM. Here is a sample of what I am trying to achieve:


Code:

// --- Off-screen (surfaceless) EGL/GBM renderer with an OSG render-to-texture camera ---
// NOTE(review): this listing is a fragment of a larger void function —
// `width`, `height`, `texture` and rnode_open() are declared elsewhere; TODO confirm.
static const EGLint conf_att[] =
{
	EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
	// NOTE(review): EGL_OPENGL_BIT selects desktop-GL-renderable configs; for a
	// GLES2 context this presumably should be EGL_OPENGL_ES2_BIT — confirm.
	EGL_RENDERABLE_TYPE, EGL_OPENGL_BIT,
	EGL_RED_SIZE, 1,
	EGL_GREEN_SIZE, 1,
	EGL_BLUE_SIZE, 1,
	EGL_ALPHA_SIZE, 0,
	EGL_NONE,
};
static const EGLint ctx_att[] =
{
	// Request a client-version-2 context (GLES2 when the ES API is bound).
	EGL_CONTEXT_CLIENT_VERSION, 2,
	EGL_NONE
};
EGLBoolean b;
EGLenum api;
EGLint major, minor, n;
EGLConfig egl_conf;
	struct gbm_device *gbm_dev;

// Open a DRM render node and wrap it in a GBM device to obtain an EGL display
// without any windowing system.
int rnode = rnode_open();
gbm_dev = gbm_create_device(rnode);
if(!gbm_dev)
{
	std::cerr << "cannot create gbm device" << std::endl;
	return;
}

EGLDisplay egl_display = eglGetDisplay((EGLNativeDisplayType)gbm_dev);
if(!egl_display)
{
	std::cerr << "cannot create EGL display" << std::endl;
	return;
}

b = eglInitialize(egl_display, &major, &minor);
if(!b)
{
	std::cerr << "cannot initialize EGL" << std::endl;
	return;
}

std::cout << "EGL major/minor: " << major << "." << minor << std::endl;
std::cout << "EGL version: " << eglQueryString(egl_display, EGL_VERSION) << std::endl;
std::cout << "EGL vendor: " << eglQueryString(egl_display, EGL_VENDOR) << std::endl;
std::cout << "EGL extensions: " << eglQueryString(egl_display, EGL_EXTENSIONS) << std::endl;

// NOTE(review): EGL_OPENGL_API binds the desktop OpenGL API, yet the error
// message below says "OpenGLES" and OSG is built for GLES2 — for an ES2
// context this should presumably be EGL_OPENGL_ES_API; confirm.
api = EGL_OPENGL_API;
b = eglBindAPI(api);
if(!b)
{
	std::cerr << "cannot bind OpenGLES API" << std::endl;
	return;
}

b = eglChooseConfig(egl_display, conf_att, &egl_conf, 1, &n);

if(!b || n != 1)
{
	std::cerr << "cannot find suitable EGL config" << std::endl;
	return;
}

EGLContext egl_ctx = eglCreateContext(egl_display, egl_conf, EGL_NO_CONTEXT, ctx_att);
if(!egl_ctx)
{
	std::cerr << "cannot create EGL context" << std::endl;
	return;
}

// Make the context current with no draw/read surface.
// NOTE(review): this requires EGL_KHR_surfaceless_context (check the
// extension string printed above); the return value is not checked here.
eglMakeCurrent(egl_display, EGL_NO_SURFACE, EGL_NO_SURFACE, egl_ctx);

// Embedded viewer: OSG will not create its own graphics context and instead
// renders into whatever context is current.
osg::ref_ptr<osgViewer::Viewer> viewer = new osgViewer::Viewer();
osg::ref_ptr<osgViewer::GraphicsWindow> graphicsWindow = viewer->setUpViewerAsEmbeddedInWindow(0, 0, width, height);
if(!graphicsWindow->valid())
{
	std::cerr << "Viewer window is invalid." << std::endl;
	return;
}

viewer->setThreadingModel(osgViewer::ViewerBase::SingleThreaded);

viewer->realize();

if(!viewer->isRealized())
{
	std::cerr << "No viewer window is realized." << std::endl;
	return;
}

// GLES2 has no fixed-function matrices/vertex attributes, so route them
// through uniforms and aliased vertex attributes on every window's State.
osgViewer::Viewer::Windows windows;
viewer->getWindows(windows);
for(osgViewer::Viewer::Windows::iterator itr = windows.begin();itr != windows.end();++itr)
{
  (*itr)->getState()->setUseModelViewAndProjectionUniforms(true);
  (*itr)->getState()->setUseVertexAttributeAliasing(true);
}

// create texture
// NOTE(review): `texture` has no visible declaration in this fragment —
// presumably a member or global osg::ref_ptr<osg::Texture2D>; confirm.
texture = new osg::Texture2D;
texture->setTextureSize(width, height);
texture->setInternalFormat(GL_RGBA);
texture->setSourceFormat(GL_RGBA);
// NOTE(review): GL_FLOAT with an RGBA render target is not renderable in core
// ES 2.0 without extensions (e.g. OES_texture_float) — verify driver support.
texture->setSourceType(GL_FLOAT);
// NOTE(review): a mipmapped MIN filter on an FBO-attached texture needs the
// mip chain to be generated; if it never is, sampling the texture later may
// read an incomplete texture — confirm intent.
texture->setFilter(osg::Texture2D::MIN_FILTER, osg::Texture2D::LINEAR_MIPMAP_LINEAR);
texture->setFilter(osg::Texture2D::MAG_FILTER, osg::Texture2D::LINEAR);
texture->setWrap(osg::Texture2D::WRAP_S, osg::Texture2D::CLAMP_TO_EDGE);
texture->setWrap(osg::Texture2D::WRAP_T, osg::Texture2D::CLAMP_TO_EDGE);

// attach texture to camera
osg::Camera *camera = viewer->getCamera();
camera->setViewport(0, 0, texture->getTextureWidth(), texture->getTextureHeight());
camera->setClearColor(osg::Vec4(1.0f, 1.0f, 1.0f, 0.0f));
camera->setClearMask(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);

// Render before the main pass, into an FBO whose color attachment is `texture`.
camera->setRenderOrder(osg::Camera::PRE_RENDER);
camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);
camera->attach(osg::Camera::COLOR_BUFFER, texture);

camera->setReferenceFrame(osg::Camera::ABSOLUTE_RF);

// Force creation of the GL texture object now, so its id can be wrapped in an
// EGLImage below (otherwise getTextureObject(0) would be NULL).
osg::State &state = graphicsWindow->getState();
state.initializeExtensionProcs();
texture->apply(state);

// create an EGL image from that texture
// NOTE(review): this local is named `image`, and another `image`
// (osg::ref_ptr<osg::Image>) is declared further down — if both are in the
// same scope this is a redefinition error; confirm scoping.
EGLImageKHR image = eglCreateImageKHR(egl_display, egl_ctx,
						   EGL_GL_TEXTURE_2D_KHR,
						   (EGLClientBuffer)(unsigned long)texture->getTextureObject(0)->id(),
						   NULL);

std::cout << "got image " << image << std::endl;

// Export the image as a DRM buffer handle, then turn it into a prime fd that
// another process/device could import.
EGLint handle;
int stride;
b = eglExportDRMImageMESA(egl_display, image, NULL, &handle, &stride);
if(!b)
{
	std::cerr << "failed to export image" << std::endl;
	return;
}

std::cout << "image exported " << handle << " " << stride << std::endl;
int fd;
int r = drmPrimeHandleToFD(rnode, handle, DRM_CLOEXEC, &fd);
if(r < 0)
{
	std::cerr << "cannot get prime-fd for handle" << std::endl;
	return;
}

// create quad
osg::ref_ptr<osg::Vec3Array> vertices = new osg::Vec3Array;
vertices->push_back(osg::Vec3(-0.5f, 0.0f,-0.5f));
vertices->push_back(osg::Vec3(0.5f, 0.0f,-0.5f));
vertices->push_back(osg::Vec3(0.5f, 0.0f, 0.5f));
vertices->push_back(osg::Vec3(-0.5f, 0.0f, 0.5f));
osg::ref_ptr<osg::Vec3Array> normals = new osg::Vec3Array;
normals->push_back(osg::Vec3(0.0f,-1.0f, 0.0f));
// NOTE(review): the texcoord order (0,0),(0,1),(1,1),(1,0) does not follow the
// vertex order above — the applied image may come out flipped/rotated; verify.
osg::ref_ptr<osg::Vec2Array> texcoords = new osg::Vec2Array;
texcoords->push_back(osg::Vec2(0.0f, 0.0f));
texcoords->push_back(osg::Vec2(0.0f, 1.0f));
texcoords->push_back(osg::Vec2(1.0f, 1.0f));
texcoords->push_back(osg::Vec2(1.0f, 0.0f));

osg::ref_ptr<osg::Geometry> quad = new osg::Geometry;
quad->setVertexArray(vertices.get());
quad->setNormalArray(normals.get());
quad->setNormalBinding(osg::Geometry::BIND_OVERALL);
quad->setTexCoordArray(0, texcoords.get());
// NOTE(review): GL_QUADS is not a valid primitive mode in OpenGL ES 2.0 — this
// draw will fail on a GLES2 context and is a likely reason the quad never
// appears; GL_TRIANGLE_FAN with the same 4 vertices is the ES2 equivalent.
quad->addPrimitiveSet(new osg::DrawArrays(GL_QUADS, 0, 4));

osg::ref_ptr<osg::Texture2D> texture2 = new osg::Texture2D;
// NOTE(review): second declaration of `image` — clashes with the EGLImageKHR
// `image` above if they share a scope.
osg::ref_ptr<osg::Image> image = osgDB::readImageFile("lz.rgb");
texture2->setImage(image.get());

// The quad is textured with lz.rgb (texture2), not with the RTT texture.
osg::ref_ptr<osg::Geode> root = new osg::Geode;
root->addDrawable(quad.get());
root->getOrCreateStateSet()->setTextureAttributeAndModes(0, texture2.get());

// NOTE(review): the scene is parented directly under the RTT camera, and no
// osg::Program is attached anywhere — with OSG built for GLES2 (no fixed
// function pipeline) geometry presumably cannot be drawn without a shader
// program in the state set; confirm.
camera->addChild(root.get());

while(!viewer->done())
{
	viewer->frame();
}




The relevant part is:


Code:

// create texture
// (Excerpt of the full listing; `texture` is declared outside this fragment.)
texture = new osg::Texture2D;
texture->setTextureSize(width, height);
texture->setInternalFormat(GL_RGBA);
texture->setSourceFormat(GL_RGBA);
// NOTE(review): GL_FLOAT render targets need extensions on core ES 2.0.
texture->setSourceType(GL_FLOAT);
texture->setFilter(osg::Texture2D::MIN_FILTER, osg::Texture2D::LINEAR_MIPMAP_LINEAR);
texture->setFilter(osg::Texture2D::MAG_FILTER, osg::Texture2D::LINEAR);
texture->setWrap(osg::Texture2D::WRAP_S, osg::Texture2D::CLAMP_TO_EDGE);
texture->setWrap(osg::Texture2D::WRAP_T, osg::Texture2D::CLAMP_TO_EDGE);

// attach texture to camera
osg::Camera *camera = viewer->getCamera();
camera->setViewport(0, 0, texture->getTextureWidth(), texture->getTextureHeight());
camera->setClearColor(osg::Vec4(1.0f, 1.0f, 1.0f, 0.0f));
camera->setClearMask(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);

// Pre-render pass into an FBO whose color attachment is `texture`.
camera->setRenderOrder(osg::Camera::PRE_RENDER);
camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);
camera->attach(osg::Camera::COLOR_BUFFER, texture);

camera->setReferenceFrame(osg::Camera::ABSOLUTE_RF);

// Force-allocate the GL texture object on this context's State.
osg::State &state = graphicsWindow->getState();
state.initializeExtensionProcs();
texture->apply(state);

...

osg::ref_ptr<osg::Geode> root = new osg::Geode;
root->addDrawable(quad.get());
root->getOrCreateStateSet()->setTextureAttributeAndModes(0, texture2.get());

// NOTE(review): no osg::Program is attached to any state set — with OSG built
// for GLES2 only, geometry presumably cannot render without a shader; confirm.
camera->addChild(root.get());




The quad object is not drawn inside the texture. Can you explain to me why?

OSG is compiled for OpenGL ES 2.0:
OSG_GL1_AVAILABLE 	OFF
OSG_GL2_AVAILABLE 	OFF
OSG_GL3_AVAILABLE 	OFF
OSG_GLES1_AVAILABLE 	OFF
OSG_GLES2_AVAILABLE 	ON
OPENGL_INCLUDE_DIR (must manually "Add Entry" in Cmake) 	/pathtogles/include/
OPENGL_LIBRARY 	/pathtogles/lib/libGLESv2.so
OPENGL_egl_LIBRARY 	/pathtoegllib/libEGL.so
OSG_GL_DISPLAYLISTS_AVAILABLE 	OFF
OSG_GL_MATRICES_AVAILABLE 	OFF
OSG_GL_VERTEX_FUNCS_AVAILABLE 	OFF
OSG_GL_VERTEX_ARRAY_FUNCS_AVAILABLE 	OFF
OSG_GL_FIXED_FUNCTION_AVAILABLE 	OFF
OSG_CPP_EXCEPTIONS_AVAILABLE 	OFF 

Best regards,
Jean Baptiste

------------------
Read this topic online here:
http://forum.openscenegraph.org/viewtopic.php?p=63410#63410








More information about the osg-users mailing list