Reputation: 8845
I am having a pile of difficulties taking a UIImage snapshot of an Isgl3d-controlled view. It seems that whatever I do, I just end up with a black square.
I have a working camera view and a 3D model in my view. I have tried both buffer methods and regular screen captures to get an image out of it, but without any fruitful result.
Does anyone have some source code where they successfully take a picture of an Isgl3d view?
Upvotes: 10
Views: 788
Reputation: 2543
I came up with this possible solution. You have to modify the isgl3d library a little bit.
The steps are:
1.
Create delegate for Isgl3dGLContext1:
In Isgl3dGLContext1.h
// Forward declaration so the @interface below can reference the protocol
// before its full definition at the bottom of this header.
@protocol ScreenShooterDelegate;
#import <OpenGLES/ES1/gl.h>
#import <OpenGLES/ES1/glext.h>
#import "Isgl3dGLContext.h"
// OpenGL ES 1.x rendering context, extended with a one-shot delegate hook so a
// client can grab the framebuffer just before the renderbuffer is presented
// (the hook fires from -finalizeRender in the .m, before -presentRenderbuffer:).
@interface Isgl3dGLContext1 : Isgl3dGLContext {
// Not retained: the delegate's owner must outlive this context
// (mirrors the `assign` property declared below).
NSObject<ScreenShooterDelegate>* __unsafe_unretained delegate;
GLuint _colorRenderBuffer;
@private
EAGLContext * _context;
// The OpenGL names for the framebuffer and renderbuffer used to render to this view
GLuint _defaultFrameBuffer;
GLuint _depthAndStencilRenderBuffer;
GLuint _depthRenderBuffer;
GLuint _stencilRenderBuffer;
// OpenGL MSAA buffers
GLuint _msaaFrameBuffer;
GLuint _msaaColorRenderBuffer;
GLuint _msaaDepthAndStencilRenderBuffer;
GLuint _msaaDepthRenderBuffer;
GLuint _msaaStencilRenderBuffer;
}
- (id) initWithLayer:(CAEAGLLayer *) layer;
// Receiver of the -takePicture callback; non-retaining to avoid a retain cycle.
@property (assign) NSObject<ScreenShooterDelegate>* delegate;
// One-shot flag: set to YES and the next -finalizeRender will invoke the
// delegate's -takePicture (the flag is reset to NO by the context).
@property BOOL takePicture;
// Exposed so the screenshot code can rebind the color renderbuffer if needed.
@property GLuint colorRenderBuffer;
@end
// Adopted by the object that performs the actual screenshot.
@protocol ScreenShooterDelegate
@optional
// Called right before -presentRenderbuffer:, while the framebuffer contents
// are still valid for glReadPixels.
- (void)takePicture;
@end
2.
Add this code to Isgl3dGLContext1.m:
@synthesize takePicture;
@synthesize colorRenderBuffer = _colorRenderBuffer;
Before the line [_context presentRenderbuffer:GL_RENDERBUFFER_OES]; in - (void) finalizeRender, add:
if(takePicture){
takePicture=NO;
if([delegate respondsToSelector:@selector(takePicture)]){
[delegate takePicture];
}
}
3. Put this code in the class where you want to take the screenshot:
In Class.h add <ScreenShooterDelegate>
In the method Class.m
[Isgl3dDirector sharedInstance].antiAliasingEnabled = NO;
Photos3DAppDelegate *appDelegate = (Photos3DAppDelegate *)[[UIApplication sharedApplication] delegate];
[appDelegate.inOutSceneView showSphere];
Isgl3dEAGLView* eaglview=(Isgl3dEAGLView*)[[Isgl3dDirector sharedInstance] openGLView];
Isgl3dGLContext1 * _glContext=(Isgl3dGLContext1*)[eaglview glContext];
_glContext.delegate=self;
_glContext.takePicture=YES;
In the method -(void)takePicture{}, put the code from Apple, and at the end of the method add [Isgl3dDirector sharedInstance].antiAliasingEnabled = YES; (in case you use it).
//https://developer.apple.com/library/ios/#qa/qa1704/_index.html
// Reads back the currently bound color renderbuffer, converts it into a
// UIImage (flipped from GL to UIKit coordinates), and saves it to the photo
// album. Must run before -presentRenderbuffer: so the framebuffer contents
// are still valid; it is invoked via the ScreenShooterDelegate hook added to
// Isgl3dGLContext1. Based on Apple QA1704.
-(void)takePicture{
NSLog(@"Creating Foto");
GLint backingWidth, backingHeight;
Isgl3dEAGLView* eaglview=(Isgl3dEAGLView*)[[Isgl3dDirector sharedInstance] openGLView];
// NOTE(review): rebinding the color renderbuffer was not needed here — the
// renderbuffer bound by the render pass is assumed to be the color one.
// Re-enable these two lines if the size queries below return zeros.
//Isgl3dGLContext1 * _glContext=(Isgl3dGLContext1*)[eaglview glContext];
//glBindRenderbufferOES(GL_RENDERBUFFER_OES, _glContext.colorRenderBuffer);
// Query the size, in pixels, of the currently bound renderbuffer.
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_WIDTH_OES, &backingWidth);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_HEIGHT_OES, &backingHeight);
NSInteger x = 0, y = 0, width = backingWidth, height = backingHeight;
// 4 bytes per pixel: RGBA, 8 bits per channel.
NSInteger dataLength = width * height * 4;
GLubyte *data = (GLubyte*)malloc(dataLength * sizeof(GLubyte));
// Read pixel data from the framebuffer
glPixelStorei(GL_PACK_ALIGNMENT, 4);
glReadPixels(x, y, width, height, GL_RGBA, GL_UNSIGNED_BYTE, data);
// Create a CGImage with the pixel data
// If your OpenGL ES content is opaque, use kCGImageAlphaNoneSkipLast to ignore the alpha channel
// otherwise, use kCGImageAlphaPremultipliedLast
// NULL release callback: `data` stays owned by this method and is freed below,
// after the image has been drawn into the bitmap context.
CGDataProviderRef ref = CGDataProviderCreateWithData(NULL, data, dataLength, NULL);
CGColorSpaceRef colorspace = CGColorSpaceCreateDeviceRGB();
CGImageRef iref = CGImageCreate(width, height, 8, 32, width * 4, colorspace, kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast,
ref, NULL, true, kCGRenderingIntentDefault);
// OpenGL ES measures data in PIXELS
// Create a graphics context with the target size measured in POINTS
NSInteger widthInPoints, heightInPoints;
// Weak-linked symbol check: non-NULL means the iOS 4+ API is available.
if (NULL != UIGraphicsBeginImageContextWithOptions) {
// On iOS 4 and later, use UIGraphicsBeginImageContextWithOptions to take the scale into consideration
// Set the scale parameter to your OpenGL ES view's contentScaleFactor
// so that you get a high-resolution snapshot when its value is greater than 1.0
CGFloat scale = eaglview.contentScaleFactor;
widthInPoints = width / scale;
heightInPoints = height / scale;
UIGraphicsBeginImageContextWithOptions(CGSizeMake(widthInPoints, heightInPoints), NO, scale);
}
else {
// On iOS prior to 4, fall back to use UIGraphicsBeginImageContext
widthInPoints = width;
heightInPoints = height;
UIGraphicsBeginImageContext(CGSizeMake(widthInPoints, heightInPoints));
}
CGContextRef cgcontext = UIGraphicsGetCurrentContext();
// UIKit coordinate system is upside down to GL/Quartz coordinate system
// Flip the CGImage by rendering it to the flipped bitmap context
// The size of the destination area is measured in POINTS
CGContextSetBlendMode(cgcontext, kCGBlendModeCopy);
CGContextDrawImage(cgcontext, CGRectMake(0.0, 0.0, widthInPoints, heightInPoints), iref);
// Retrieve the UIImage from the current context
UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
// Clean up
free(data);
CFRelease(ref);
CFRelease(colorspace);
CGImageRelease(iref);
// `image` is autoreleased; the photo-album save copies what it needs.
UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil);
// Restore MSAA, which was disabled before requesting the screenshot.
[Isgl3dDirector sharedInstance].antiAliasingEnabled = YES;
}
Note: For me, just commenting out glBindRenderbufferOES(GL_RENDERBUFFER_OES, _colorRenderbuffer); worked. In your case, you may do these steps with Isgl3dGLContext2 instead of Isgl3dGLContext1.
Upvotes: 2
Reputation: 70998
Here are Apple's instructions & official code for snapshotting a GL view to a UIImage
(takes into account retina displays, flipped coords, etc) and I've been using it successfully. This isn't iSGL3D-specific, of course, but as long as you can get the right context and framebuffer to bind to, it should do the right thing. (As the page notes, you must be sure to take the snapshot before a -presentRenderbuffer:
is called so the renderbuffer is valid.)
https://developer.apple.com/library/ios/#qa/qa1704/_index.html
I have only a cursory familiarity with the iSGL3D library, and it doesn't look like there are obvious hooks there to let you render the scene but not present it (or render it to an offscreen buffer first). The place where you might need to intervene is in the -finalizeRender
method of the Isgl3dGLContext
subclass you're using, just prior to the call to -presentRenderbuffer
call. That context is an internal framework class here, so you might need to shuffle things around a little bit within the library to set up (say) a delegate from the context that works back out of the view and the director to eventually ask your app to take any action just prior to the "present" call, during which you could choose to run the screenshot code if you wanted to, or do nothing if you didn't want to.
Upvotes: 5
Reputation:
I use this code snippet successfully in one of my apps to do OpenGL screenshots.
// Byte offsets of the RGBA components within one 4-byte pixel; used by
// -glToUIImage when copying rows during the vertical flip.
enum {
red,
green,
blue,
alpha
};
// Reads the currently bound framebuffer into memory, flips it vertically
// (GL rows are bottom-up, UIKit is top-down), and wraps it in a UIImage.
// Precondition: the target framebuffer must be bound and rendered, but not
// yet presented. Returns an autoreleased UIImage (manual reference counting).
// NOTE(review): sizes come from self.glView.bounds (points); this assumes the
// framebuffer is the same size in pixels — confirm on retina displays.
- (UIImage *)glToUIImage {
CGSize glSize = self.glView.bounds.size;
// 4 bytes per pixel: RGBA, 8 bits per channel.
NSInteger bufDataLen = glSize.width * glSize.height * 4;
// Allocate array and read pixels into it.
GLubyte *buffer = (GLubyte *)malloc(bufDataLen);
glReadPixels(0, 0, glSize.width, glSize.height, GL_RGBA, GL_UNSIGNED_BYTE, buffer);
// We need to flip the image
NSUInteger maxRow = (NSInteger)glSize.height - 1;
NSUInteger bytesPerRow = (NSInteger)glSize.width * 4;
GLubyte *buffer2 = (GLubyte *)malloc(bufDataLen);
// Copy each source row into its mirrored destination row, one pixel at a time.
for(int y = maxRow; y >= 0; y--) {
for(int x = 0; x < bytesPerRow; x+=4) {
NSUInteger c0 = y * bytesPerRow + x;
NSUInteger c1 = (maxRow - y) * bytesPerRow + x;
buffer2[c0+red] = buffer[c1+red];
buffer2[c0+green] = buffer[c1+green];
buffer2[c0+blue] = buffer[c1+blue];
buffer2[c0+alpha] = buffer[c1+alpha];
}
}
free(buffer);
// Make data provider with data
// CFDataCreate copies the bytes, so buffer2 can be freed immediately after.
CFDataRef imageData = CFDataCreate(NULL, buffer2, bufDataLen);
free(buffer2);
CGDataProviderRef provider = CGDataProviderCreateWithCFData(imageData);
CFRelease(imageData);
// Bitmap format
int bitsPerComponent = 8;
int bitsPerPixel = 32;
CGColorSpaceRef colorSpaceRef = CGColorSpaceCreateDeviceRGB();
CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault | kCGImageAlphaPremultipliedLast;
CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;
// Create the CGImage
CGImageRef imageRef = CGImageCreate(glSize.width,
glSize.height,
bitsPerComponent,
bitsPerPixel,
bytesPerRow,
colorSpaceRef,
bitmapInfo,
provider,
NULL,
NO,
renderingIntent);
// Clean up
CGColorSpaceRelease(colorSpaceRef);
CGDataProviderRelease(provider);
// Convert to UIImage
UIImage *image = [[UIImage alloc] initWithCGImage:imageRef];
CGImageRelease(imageRef);
// Manual reference counting: hand ownership to the caller's autorelease pool.
return [image autorelease];
}
Make sure you bind the frame buffer before you do this, like so:
glBindFramebufferOES(GL_FRAMEBUFFER_OES, myFrameBuffer);
glViewport(0, 0, myBackingWidth, myBackingHeight);
And call -glToUIImage
before the frame buffer is presented!
For further information Apple provides sample code for taking screenshots from OpenGL.
Upvotes: 3
Reputation: 781
Is this what you want ?
This will take a screenshot from the current context and framebuffer and save it to the photo album.
If you don't want to save to the photo album just get the resulting UIImage at the end.
Also remember to call this only after you've finished drawing, but before switching buffers.
Also if you're using MSAA, this has to be called after glResolveMultisampleFramebufferAPPLE and the new buffer bind.
#ifdef AUTOSCREENSHOT
// callback for CGDataProviderCreateWithData
// Release callback for CGDataProviderCreateWithData: frees the pixel buffer
// once the provider (and any CGImage built on it) no longer needs the bytes.
void releaseData(void *info, const void *data, size_t dataSize) {
// Cast away const: the buffer was malloc'd by the caller of the provider.
free((void*)data);
}
// callback for UIImageWriteToSavedPhotosAlbum
// Completion callback for UIImageWriteToSavedPhotosAlbum: balances the retain
// held on the image while the asynchronous save was in flight (manual
// reference counting; the image is alloc'd in -saveCurrentScreenToPhotoAlbum).
- (void)image:(UIImage *)image didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo {
// NOTE(review): `error` is ignored — a failed save is logged as finished anyway.
NSLog(@"Save finished");
[image release];
}
// Reads back the current framebuffer, flips it into UIKit row order, and saves
// the result to the photo album. Call after drawing has finished but before
// the buffers are swapped; with MSAA, call after
// glResolveMultisampleFramebufferAPPLE and rebinding the resolved buffer.
// Depends on instance state: `screenSize` (points) and `retina` (scale factor).
-(void)saveCurrentScreenToPhotoAlbum {
// Framebuffer dimensions in pixels = point size * retina scale.
int height = (int)screenSize.y*retina;
int width = (int)screenSize.x*retina;
// 4 bytes per pixel (RGBA8).
NSInteger myDataLength = width * height * 4;
GLubyte *buffer = (GLubyte *) malloc(myDataLength);
GLubyte *buffer2 = (GLubyte *) malloc(myDataLength);
glReadPixels(0, 0, width, height, GL_RGBA, GL_UNSIGNED_BYTE, buffer);
// Flip vertically, byte by byte: GL rows are bottom-up, CGImage expects top-down.
for(int y = 0; y <height; y++) {
for(int x = 0; x < width * 4; x++) {
buffer2[(int)((height - 1 - y) * width * 4 + x)] = buffer[(int)(y * 4 * width + x)];
}
}
free(buffer);
// Ownership of buffer2 passes to the provider; releaseData frees it later.
CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, buffer2, myDataLength, releaseData);
int bitsPerComponent = 8;
int bitsPerPixel = 32;
int bytesPerRow = 4 * width;
CGColorSpaceRef colorSpaceRef = CGColorSpaceCreateDeviceRGB();
// NOTE(review): no alpha-info flag is set here; for 32 bpp RGB data Quartz
// normally requires e.g. kCGImageAlphaNoneSkipLast — confirm against the
// CGImageCreate supported-pixel-format table if images come out NULL.
CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault;
CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;
CGImageRef imageRef = CGImageCreate(width, height, bitsPerComponent, bitsPerPixel, bytesPerRow, colorSpaceRef, bitmapInfo, provider, NULL, NO, renderingIntent);
CGColorSpaceRelease(colorSpaceRef);
CGDataProviderRelease(provider);
// Retained here; released in image:didFinishSavingWithError:contextInfo:
// once the asynchronous save completes.
UIImage *image = [[UIImage alloc] initWithCGImage:imageRef];
CGImageRelease(imageRef);
UIImageWriteToSavedPhotosAlbum(image, self, @selector(image:didFinishSavingWithError:contextInfo:), nil);
}
#endif
I use this code to save timed screenshots while I'm playing, so I have good material to put in the App Store.
Upvotes: 3