[osg-users] Capture image on iOS get black image
duc nguyen
osgforum at tevs.eu
Mon Dec 26 16:58:15 PST 2016
Hi,
I’m developing an app on iOS using OpenSceneGraph with GLES 2. The app has a feature to capture a photo from the osgViewer and save it to an image file. I tried some classes I found in other forum topics (below), but the image I get is completely black.
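In essence the capture path is: install a post-draw callback on the viewer's camera, read the framebuffer back into an osg::Image inside it, and write that image out with osgDB::writeImageFile. A stripped-down sketch of that pattern (the class name here is only illustrative; the full classes I am using follow below):
Code:
// minimal sketch of the capture pattern (illustrative only)
struct CaptureSketchCallback : public osg::Camera::DrawCallback
{
    virtual void operator () (const osg::Camera& camera) const
    {
        osg::ref_ptr<osg::Image> image = new osg::Image;
        image->readPixels(0, 0,
                          static_cast<int>(camera.getViewport()->width()),
                          static_cast<int>(camera.getViewport()->height()),
                          GL_RGB, GL_UNSIGNED_BYTE);
        osgDB::writeImageFile(*image, "screenshot.png");
    }
};

// usage: viewer->getCamera()->setPostDrawCallback(new CaptureSketchCallback());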
Code:
// headers needed by the callbacks below
#include <osg/Camera>
#include <osg/Image>
#include <osg/Notify>
#include <osgDB/WriteFile>
#include <OpenThreads/ScopedLock>

class SnapImageDrawCallback : public osg::Camera::DrawCallback
{
public:
    SnapImageDrawCallback():
        _snapImageOnNextFrame(false)
    {
    }

    void setFileName(const std::string& filename) { _filename = filename; }
    const std::string& getFileName() const { return _filename; }

    void setSnapImageOnNextFrame(bool flag) { _snapImageOnNextFrame = flag; }
    bool getSnapImageOnNextFrame() const { return _snapImageOnNextFrame; }

    virtual void operator () (const osg::Camera& camera) const
    {
        if (!_snapImageOnNextFrame) return;

        // read back the camera's viewport
        int x = camera.getViewport()->x();
        int y = camera.getViewport()->y();
        int width  = camera.getViewport()->width();
        int height = camera.getViewport()->height();

        osg::ref_ptr<osg::Image> image = new osg::Image;
        image->readPixels(x, y, width, height, GL_RGB, GL_UNSIGNED_BYTE);

        if (osgDB::writeImageFile(*image, _filename))
        {
            osg::notify(osg::NOTICE) << "Saved screen image to `" << _filename << "`" << std::endl;
        }

        _snapImageOnNextFrame = false;
    }

protected:
    std::string  _filename;
    mutable bool _snapImageOnNextFrame;
};
class WindowCaptureCallback : public osg::Camera::DrawCallback
{
public:
    WindowCaptureCallback(GLenum readBuffer, const std::string& name):
        _readBuffer(readBuffer),
        _fileName(name)
    {
        _image = new osg::Image;
    }

    virtual void operator () (osg::RenderInfo& renderInfo) const
    {
#if !defined(OSG_GLES1_AVAILABLE) && !defined(OSG_GLES2_AVAILABLE)
        glReadBuffer(_readBuffer);
#else
        osg::notify(osg::NOTICE) << "Error: GLES unable to do glReadBuffer" << std::endl;
#endif

        OpenThreads::ScopedLock<OpenThreads::Mutex> lock(_mutex);

        osg::GraphicsContext* gc = renderInfo.getState()->getGraphicsContext();
        if (gc->getTraits())
        {
            // pick the pixel format from the context's traits
            GLenum pixelFormat = gc->getTraits()->alpha ? GL_RGBA : GL_RGB;

#if defined(OSG_GLES1_AVAILABLE) || defined(OSG_GLES2_AVAILABLE)
            if (pixelFormat == GL_RGB)
            {
                // GLES only guarantees GL_RGBA/GL_UNSIGNED_BYTE plus one
                // implementation-defined format/type pair for glReadPixels,
                // so fall back to GL_RGBA if GL_RGB is not that pair.
                #ifndef GL_IMPLEMENTATION_COLOR_READ_FORMAT
                    #define GL_IMPLEMENTATION_COLOR_READ_FORMAT 0x8B9B
                #endif
                #ifndef GL_IMPLEMENTATION_COLOR_READ_TYPE
                    #define GL_IMPLEMENTATION_COLOR_READ_TYPE 0x8B9A
                #endif

                GLint readFormat = 0, readType = 0;
                glGetIntegerv(GL_IMPLEMENTATION_COLOR_READ_FORMAT, &readFormat);
                glGetIntegerv(GL_IMPLEMENTATION_COLOR_READ_TYPE, &readType);

                if (readFormat != GL_RGB || readType != GL_UNSIGNED_BYTE)
                {
                    pixelFormat = GL_RGBA; // always supported
                }
            }
#endif

            int width  = gc->getTraits()->width;
            int height = gc->getTraits()->height;

            std::cout << "Capture: size=" << width << "x" << height
                      << ", format=" << (pixelFormat == GL_RGBA ? "GL_RGBA" : "GL_RGB") << std::endl;

            _image->readPixels(0, 0, width, height, pixelFormat, GL_UNSIGNED_BYTE);
        }

        if (!_fileName.empty())
        {
            std::cout << "Writing to: " << _fileName << std::endl;
            osgDB::writeImageFile(*_image, _fileName);
        }
    }

protected:
    GLenum                     _readBuffer;
    std::string                _fileName;
    osg::ref_ptr<osg::Image>   _image;
    mutable OpenThreads::Mutex _mutex;
};
@implementation CreateGroupViewController

- (void)viewDidLoad {
    [super viewDidLoad];

    OSGAdapter *osgAdapter = [[OSGAdapter alloc] init];
    //osg::setNotifyLevel(osg::DEBUG_INFO);

    // get the screen size
    unsigned int w = sceneView.frame.size.width;
    unsigned int h = sceneView.frame.size.height;

    // create the root node
    _root = new osg::MatrixTransform();

    // create the viewer
    _viewer = new osgViewer::Viewer();
    _viewer->getCamera()->setClearColor(osg::Vec4(0.25, 0.4, 0.0, 0.0));

    // try MSAA, available for iOS >= 4.0
    osg::ref_ptr<osg::DisplaySettings> settings = osg::DisplaySettings::instance();
    settings->setNumMultiSamples(4);

    if (1) {
        // create our graphics context directly so we can pass our own window
        osg::ref_ptr<osg::GraphicsContext::Traits> traits = new osg::GraphicsContext::Traits();

        // init the WindowData variable that holds the handle of the window to display OSG in
        osg::ref_ptr<osg::Referenced> windata = new osgViewer::GraphicsWindowIOS::WindowData(self.sceneView);

        // set up the traits parameters
        traits->x = 0;
        traits->y = 0;
        traits->width = w * 2;   // doubled for the retina scale
        traits->height = h * 2;
        traits->depth = 16;      // keep memory down, default is currently 24
        traits->windowDecoration = false;
        traits->doubleBuffer = true;
        traits->sharedContext = NULL;
        traits->setInheritedWindowPixelFormat = true;
        traits->samples = 4;
        traits->sampleBuffers = 1;
        traits->supportsResize = true;
        traits->inheritedWindowData = windata;

        // create the graphics context
        osg::ref_ptr<osg::GraphicsContext> graphicsContext = osg::GraphicsContext::createGraphicsContext(traits.get());

        // if the context was created then attach it to our viewer
        if (graphicsContext)
        {
            _viewer->getCamera()->setGraphicsContext(graphicsContext);
            _viewer->getCamera()->setViewport(new osg::Viewport(0, 0, traits->width, traits->height));
            _viewer->getCamera()->setProjectionMatrixAsPerspective(30.0f, static_cast<double>(traits->width)/static_cast<double>(traits->height), 1.0f, 10000.0f);
        }
    }

    _root = [osgAdapter loadData];

    osg::Camera* hud_camera = [osgAdapter createHUD:(w * 2) :(h * 2)];
    _root->addChild(hud_camera);

    _viewer->setSceneData(_root.get());
    _viewer->setCameraManipulator(new osgGA::MultiTouchTrackballManipulator());
    //_viewer->addEventHandler(new TestMultiTouchEventHandler(hud_camera));

    // run single-threaded
    _viewer->setThreadingModel(osgViewer::Viewer::SingleThreaded);
    _viewer->realize();

    // render a frame so the window manager shows some content and not only an empty, black window
    _viewer->frame();

    // create a display link, which will update our scene on every screen refresh
    _displayLink = [[UIScreen mainScreen] displayLinkWithTarget:self selector:@selector(updateScene)];
    [_displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];

    // install the snapshot callback as the camera's post-draw callback
    osg::ref_ptr<SnapImageDrawCallback> snapImageDrawCallback = new SnapImageDrawCallback();
    _viewer->getCamera()->setPostDrawCallback(snapImageDrawCallback.get());
}
//
// Display-link callback: update the scene and render the viewer
//
- (void)updateScene {
    _viewer->frame();
}
- (IBAction)createImage:(id)sender {
    NSString* imageName = @"screenshot.png";

    _viewer->getViewerBase()->stopThreading();

    // use the SnapImageDrawCallback class
    osg::ref_ptr<SnapImageDrawCallback> snapImageDrawCallback =
        dynamic_cast<SnapImageDrawCallback*>(_viewer->getCamera()->getPostDrawCallback());

    if (snapImageDrawCallback.get())
    {
        std::cout << "make screenshot" << std::endl;

        NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
        NSString *documentsDirectory = [paths objectAtIndex:0];
        NSString *fullPath = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"%@", imageName]];

        snapImageDrawCallback->setFileName([fullPath UTF8String]);
        snapImageDrawCallback->setSnapImageOnNextFrame(true);
    }
    else
    {
        std::cout << "Warning: no snapshot callback installed" << std::endl;
    }

    // alternative: use the WindowCaptureCallback class
    // GLenum buffer = _viewer->getCamera()->getGraphicsContext()->getTraits()->doubleBuffer ? GL_BACK : GL_FRONT;
    // _viewer->getCamera()->setFinalDrawCallback(new WindowCaptureCallback(buffer, [fullPath UTF8String]));
}

@end
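For reference, GLES 2 only guarantees the GL_RGBA / GL_UNSIGNED_BYTE combination for glReadPixels (plus one implementation-defined format/type pair), so a GL_RGBA readback may behave differently from the GL_RGB one above. A minimal, untested sketch of that variation of SnapImageDrawCallback::operator():
Code:
// sketch only: same callback as above, but reading GL_RGBA,
// which GLES 2 is guaranteed to support for glReadPixels
virtual void operator () (const osg::Camera& camera) const
{
    if (!_snapImageOnNextFrame) return;

    const osg::Viewport* vp = camera.getViewport();

    osg::ref_ptr<osg::Image> image = new osg::Image;
    image->readPixels(static_cast<int>(vp->x()), static_cast<int>(vp->y()),
                      static_cast<int>(vp->width()), static_cast<int>(vp->height()),
                      GL_RGBA, GL_UNSIGNED_BYTE);

    if (osgDB::writeImageFile(*image, _filename))
    {
        osg::notify(osg::NOTICE) << "Saved screen image to `" << _filename << "`" << std::endl;
    }

    _snapImageOnNextFrame = false;
}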
Could you please help me resolve this?
...
Thank you!
Cheers,
duc
------------------
Read this topic online here:
http://forum.openscenegraph.org/viewtopic.php?p=69771#69771