Georgina
Georgina

Reputation: 852

CGContextDrawImage: invalid context 0x0. Failed to bind EAGLDrawable to GL_RENDERBUFFER 2

I'm working on a custom UIView subclass in my iOS project that involves using GLKView and CAEAGLLayer for camera rendering and applying beauty filters. The main goal is to capture video from the camera, process it with beauty filters, and render the output in a custom view. However, I'm encountering several issues related to setting the drawable properties of CAEAGLLayer and rendering the video frames correctly.

Below is the code:

#import "FUCameraPlatformView.h"
#import "OverlayView.h"
#import "GLView.h" 
#import <GLKit/GLKit.h>

@interface FUCameraPlatformView() <FUCameraDelegate>
@property (nonatomic, strong) EAGLContext *glContext;
@property (nonatomic, strong) GLKView *glView;
@property (nonatomic, strong) CIContext *ciContext;
@property (nonatomic, strong) GLView *view; 
@property (nonatomic, strong) FUCamera *camera;
@property (nonatomic, strong) MHBeautyManager *beautyManager;
@property (nonatomic , strong) OverlayView *overlay;
@end

@implementation FUCameraPlatformView

- (instancetype)initWithFrame:(CGRect)frame
               viewIdentifier:(int64_t)viewId
                    arguments:(id)args
               cameraInstance:(FUCamera *)camera
              binaryMessenger:(NSObject<FlutterBinaryMessenger> *)messenger
                beautyManager:(MHBeautyManager *)manager {
    self = [super init];
    if (self) {
        _beautyManager = manager;
        _beautyManager.delegate = self;
        _camera = camera;
        _camera.delegate = self;
       _view = [[GLView alloc] initWithFrame:frame];
        [self setupGL];
        [self setupCamera];
    }
    return self;
}

- (void)setupGL {
    _glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
    _glView = [[GLKView alloc] initWithFrame:_view.bounds context:self.glContext];
    _glView.enableSetNeedsDisplay = NO;
    _glView.context = self.glContext;
    _glView.drawableDepthFormat = GLKViewDrawableDepthFormat24;
    _ciContext = [CIContext contextWithEAGLContext:_glContext];

    // Ensure the drawable properties are set correctly
    CAEAGLLayer *eaglLayer = (CAEAGLLayer *)_view.layer;
    eaglLayer.opaque = YES;
    eaglLayer.contentsScale = [UIScreen mainScreen].scale;
    eaglLayer.drawableProperties = @{
        kEAGLDrawablePropertyRetainedBacking: [NSNumber numberWithBool:NO],
        kEAGLDrawablePropertyColorFormat: kEAGLColorFormatRGBA8
    };

    GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
    if (status != GL_FRAMEBUFFER_COMPLETE) {
        NSLog(@"Failed to make complete framebuffer object %x", status);
    }

    if (![EAGLContext setCurrentContext:self.glContext]) {
        NSLog(@"Failed to set current OpenGL context.");
        return;
    }
    
    [_view addSubview:_glView];

    _overlay = [[OverlayView alloc] initWithFrame:_view.bounds];
    _overlay.backgroundColor = [UIColor clearColor];
    [_view addSubview:_overlay];
}


- (void)showFaceLandmarksAndFaceRectWithPersonsArray:(NSMutableArray *)arrPersons {
    if (_overlay.hidden) {
        _overlay.hidden = NO;
    }
    _overlay.arrPersons = arrPersons;
    [_overlay setNeedsDisplay];
    [_overlay layoutIfNeeded];
}

- (void)setupCamera {
    [self.camera startCapture];
}

- (void)didOutputVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    // Check for null sample buffer
    if (!sampleBuffer) {
        return;
    }

    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (!imageBuffer) {
        return;
    }

    OSType formatType = CVPixelBufferGetPixelFormatType(imageBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);



    CIImage *image = [CIImage imageWithCVPixelBuffer:imageBuffer];
    if (!image) {
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
        return;
    }

    CIFilter *filter = [CIFilter filterWithName:@"CIPhotoEffectChrome" keysAndValues:kCIInputImageKey, image, nil];
    if (!filter) {
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
        return;
    }
    [filter setValue:image forKey:kCIInputImageKey];
    [_glView bindDrawable];

    CGFloat width = _glView.drawableHeight / 640.f * 480.f;
    CGFloat height = _glView.drawableHeight;
    CGFloat x = (_glView.drawableWidth - width) / 2;

    if (_ciContext) {
        [_ciContext drawImage:filter.outputImage inRect:CGRectMake(x, 0, width, height) fromRect:CGRectMake(0, 0, 480, 640)];
    }

    if (self.beautyManager) {
        [self.beautyManager processWithPixelBuffer:imageBuffer formatType:formatType];
    }
    [_glView display];

    
    // CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
}


// - (void)didOutputVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer {
//     CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
//     OSType formatType = CVPixelBufferGetPixelFormatType(imageBuffer);
//     CVPixelBufferLockBaseAddress(imageBuffer, 0);
//     CIImage *image = [CIImage imageWithCVPixelBuffer:imageBuffer];
//     CIFilter *filter = [CIFilter filterWithName:@"CIPhotoEffectChrome" keysAndValues:kCIInputImageKey, image, nil];
//     [filter setValue:image forKey:kCIInputImageKey];
//     [_glView bindDrawable];
//     CGFloat width = _glView.drawableHeight/640.f*480.f;
//     CGFloat height = _glView.drawableHeight;
//     CGFloat x = (_glView.drawableWidth - width) /2;
//     [_ciContext drawImage:filter.outputImage inRect:CGRectMake(x, 0, width, height) fromRect:CGRectMake(0, 0, 480, 640)];
//     [self.beautyManager processWithPixelBuffer:imageBuffer formatType:formatType];
//     [_glView display];

// }

- (UIView *)view {
    return _view;
}

@end

Upvotes: 1

Views: 30

Answers (1)

Georgina
Georgina

Reputation: 852

Solution

Apparently the GLKView is already a UIView. Not good at explaining things but this solved the problem.

#import "FUCameraPlatformView.h"
#import "OverlayView.h"
#import "GLView.h" 
#import <GLKit/GLKit.h>

@interface FUCameraPlatformView()
@property (nonatomic, strong) EAGLContext *glContext;
@property (nonatomic, strong) GLKView *glView;
@property (nonatomic, strong) CIContext *ciContext;
@property (nonatomic, strong) FUCamera *camera;
@property (nonatomic, strong) MHBeautyManager *beautyManager;
@property (nonatomic , strong) OverlayView *overlay;
@property (nonatomic, assign) GLuint framebuffer;
@property (nonatomic, assign) GLuint colorRenderbuffer;
@property (nonatomic, strong) dispatch_queue_t captureQueue;
@end

@implementation FUCameraPlatformView

- (instancetype)initWithFrame:(CGRect)frame
               viewIdentifier:(int64_t)viewId
                    arguments:(id)args
              binaryMessenger:(NSObject<FlutterBinaryMessenger> *)messenger
                beautyManager:(MHBeautyManager *)manager 
               cameraInstance:(FUCamera *)camera{
    self = [super init];
    if (self) {
         _containerView = [[UIView alloc] initWithFrame:frame];
           // Initialize the OpenGL context
        _glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
        if (!_glContext) {
            NSLog(@"Failed to create ES context");
            return nil;
        }
        if (![EAGLContext setCurrentContext:_glContext]) {
            NSLog(@"Failed to set current OpenGL context.");
            return nil;
        }

        // Initialize the GLKView
        _glView = [[GLKView alloc] initWithFrame:frame context:_glContext];
        _glView.context = _glContext;
        // _glView.drawableDepthFormat = GLKViewDrawableDepthFormat24;
        //  _glView.drawableColorFormat = GLKViewDrawableColorFormatRGBA8888; 

        // Initialize the CIContext
        _ciContext = [CIContext contextWithEAGLContext:_glContext];


        // Add overlay view
        _overlay = [[OverlayView alloc] initWithFrame:_glView.bounds];
        _overlay.backgroundColor = [UIColor clearColor];
         [_glView addSubview:_overlay];

        // Setup the camera and beauty manager
        _camera = camera;
        _camera.delegate = self;
        _beautyManager = manager;
        _captureQueue = dispatch_queue_create("com.faceunity.videoCaptureQueue", DISPATCH_QUEUE_SERIAL);
        // Start the camera
        [self setupCamera];

    }
    return self;
}

- (void)showFaceLandmarksAndFaceRectWithPersonsArray:(NSMutableArray *)arrPersons {
    if (_overlay.hidden) {
        _overlay.hidden = NO;
    }
    _overlay.arrPersons = arrPersons;
    [_overlay setNeedsDisplay];
    [_overlay layoutIfNeeded];
}

- (void)setupCamera {
    [_camera startCapture];
}


- (void)didOutputVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    OSType formatType = CVPixelBufferGetPixelFormatType(imageBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    [self.beautyManager processWithPixelBuffer:imageBuffer formatType:formatType];
    CIImage *image = [CIImage imageWithCVPixelBuffer:imageBuffer];
    CIFilter *filter = [CIFilter filterWithName:@"CIPhotoEffectChrome" keysAndValues:kCIInputImageKey, image, nil];
    [filter setValue:image forKey:kCIInputImageKey];
    [_glView bindDrawable];
    CGFloat width = _glView.drawableHeight/640.f*480.f;
    CGFloat height = _glView.drawableHeight;
    CGFloat x = (_glView.drawableWidth - width) /2;
    [_ciContext drawImage:filter.outputImage inRect:CGRectMake(x, 0, width, height) fromRect:CGRectMake(0, 0, 480, 640)];
    [_glView display];

}

- (UIView *)view {
    return _glView;
}

- (void)dealloc {
    [self disposeAll];
}

- (void)disposeAll {
    [_camera stopCapture];
    _camera.delegate = nil;
    _camera = nil;
    [_beautyManager releaseSession];
    _beautyManager = nil;

    [EAGLContext setCurrentContext:_glContext];
    glDeleteFramebuffers(1, &_framebuffer);
    glDeleteRenderbuffers(1, &_colorRenderbuffer);
    [EAGLContext setCurrentContext:nil];
    _glContext = nil;
}

@end

Upvotes: 0

Related Questions