Reputation: 651
I'm trying to apply a simple Core Image filter to the live camera input. I think my code is OK, but calling `drawImage:inRect:fromRect:` in the `captureOutput:` method causes either an `EXC_BAD_ACCESS` or a `[__NSCFNumber drawImage:inRect:fromRect:]: unrecognized selector` error, which makes me think my context has been deallocated by the time I call `drawImage` on it. That doesn't make sense to me, since my `CIContext` is a class member.
The problem does not seem to come from OpenGL, since I tried with a simple context (not created from an `EAGLContext`) and got the same issue.
I'm testing on an iPhone 5 with iOS 6, since the camera doesn't work on the simulator.
Could you help me with this? Thank you very much for your time.
Here is my .h file:
<!-- language: lang-objc -->
// CameraController.h
#import <UIKit/UIKit.h>
#import <OpenGLES/EAGL.h>
#import <AVFoundation/AVFoundation.h>
#import <GLKit/GLKit.h>
#import <CoreMedia/CoreMedia.h>
#import <CoreVideo/CoreVideo.h>
#import <QuartzCore/QuartzCore.h>
#import <CoreImage/CoreImage.h>
#import <ImageIO/ImageIO.h>
@interface CameraController : GLKViewController <AVCaptureVideoDataOutputSampleBufferDelegate> {
    AVCaptureSession *avCaptureSession;
    CIContext *coreImageContext;
    CIContext *ciTestContext;
    GLuint _renderBuffer;
    EAGLContext *glContext;
}
@end
and my .m file:
<!-- language: lang-objc -->
// CameraController.m
#import "CameraController.h"
@interface CameraController ()
@end
@implementation CameraController
- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
    self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];
    if (self) {
    }
    return self;
}
- (void)viewDidLoad
{
    [super viewDidLoad];

    // Initialize OpenGL ES 2 context
    glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
    if (!glContext) {
        NSLog(@"Failed to create ES context");
    }
    [EAGLContext setCurrentContext:nil];

    // Get the GLKView, set its depth format to 24 bits, and set its context to the OpenGL context created above
    GLKView *view = (GLKView *)self.view;
    view.context = glContext;
    view.drawableDepthFormat = GLKViewDrawableDepthFormat24;

    // Create the CIContext from the EAGLContext
    NSMutableDictionary *options = [[NSMutableDictionary alloc] init];
    [options setObject:[NSNull null] forKey:kCIContextWorkingColorSpace];
    coreImageContext = [CIContext contextWithEAGLContext:glContext options:options];

    glGenRenderbuffers(1, &_renderBuffer);
    glBindRenderbuffer(GL_RENDERBUFFER, _renderBuffer);

    // Initialize the video capture device
    NSError *error;
    AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];

    // Initialize the video output object and set its output settings
    AVCaptureVideoDataOutput *dataOutput = [[AVCaptureVideoDataOutput alloc] init];
    [dataOutput setAlwaysDiscardsLateVideoFrames:YES];
    [dataOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
                                                             forKey:(id)kCVPixelBufferPixelFormatTypeKey]];

    // Deliver the sample buffers to this object, which implements the AVCaptureVideoDataOutputSampleBufferDelegate protocol via the captureOutput method
    [dataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];

    // Initialize the capture session, add the input and output, and start running
    avCaptureSession = [[AVCaptureSession alloc] init];
    [avCaptureSession beginConfiguration];
    [avCaptureSession setSessionPreset:AVCaptureSessionPreset1280x720];
    [avCaptureSession addInput:input];
    [avCaptureSession addOutput:dataOutput];
    [avCaptureSession commitConfiguration];
    [avCaptureSession startRunning];
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    // Create a CIImage from the sample buffer of the camera frame
    CVPixelBufferRef pixelBuffer = (CVPixelBufferRef)CMSampleBufferGetImageBuffer(sampleBuffer);
    CIImage *inputImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];

    // Create the relevant filter
    CIFilter *filter = [CIFilter filterWithName:@"CISepiaTone"];
    [filter setValue:inputImage forKey:kCIInputImageKey];
    [filter setValue:[NSNumber numberWithFloat:0.8f] forKey:kCIInputIntensityKey];

    // Get a reference to the output image of the filter
    CIImage *result = [filter valueForKey:kCIOutputImageKey];

    // Draw to the context
    [coreImageContext drawImage:result inRect:[result extent] fromRect:[result extent]];
    [glContext presentRenderbuffer:GL_RENDERBUFFER];
}
- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}
@end
Upvotes: 0
Views: 1848
Reputation: 3541
In your `viewDidLoad` method, you have:

<!-- language: lang-objc -->
coreImageContext = [CIContext contextWithEAGLContext:glContext options:options];

`coreImageContext` needs to be retained if you want to use it in the `captureOutput:` method. `contextWithEAGLContext:options:` returns an autoreleased object, so under manual reference counting nothing keeps the context alive once the autorelease pool drains; its memory can then be reused by another object, which is exactly why you see the `[__NSCFNumber ...]: unrecognized selector` error.
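For example, a minimal sketch assuming the project uses manual reference counting (no ARC):

<!-- language: lang-objc -->
// Retain the autoreleased context so it outlives the current autorelease pool
coreImageContext = [[CIContext contextWithEAGLContext:glContext options:options] retain];

// ... and balance the retain later, e.g. in dealloc:
// [coreImageContext release];

If the project uses ARC instead, the plain assignment to the `coreImageContext` ivar is already a strong reference, so the context stays alive without an explicit `retain`.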
Upvotes: 2