CGContextDrawImage:无效上下文 0x0。无法将 EAGLDrawable 绑定到 GL_RENDERBUFFER 2

问题描述（投票：0 · 回答：1）

我正在我的 iOS 项目中开发一个自定义 UIView 子类,其中涉及使用 GLKView 和 CAEAGLLayer 进行相机渲染和应用美颜滤镜。主要目标是从相机捕获视频,使用美颜滤镜对其进行处理,并在自定义视图中渲染输出。但是,我遇到了与设置 CAEAGLLayer 的可绘制属性和正确渲染视频帧相关的几个问题。

以下是代码:

#import "FUCameraPlatformView.h"
#import "OverlayView.h"
#import "GLView.h" 
#import <GLKit/GLKit.h>

// Class extension: private state for the Flutter platform view that hosts
// camera capture, GL rendering, and the face-landmark overlay.
@interface FUCameraPlatformView() <FUCameraDelegate>
// OpenGL ES 2 context shared by the GLKView and the CIContext.
@property (nonatomic, strong) EAGLContext *glContext;
// GL-backed view the filtered camera frames are rendered into.
@property (nonatomic, strong) GLKView *glView;
// Core Image context that draws directly into the GLKView's drawable.
@property (nonatomic, strong) CIContext *ciContext;
// Host container returned to Flutter (custom UIView subclass; see GLView.h).
@property (nonatomic, strong) GLView *view; 
// Camera wrapper; this object is its delegate (didOutputVideoSampleBuffer:).
@property (nonatomic, strong) FUCamera *camera;
// Beauty-filter SDK session; processes raw pixel buffers in place.
@property (nonatomic, strong) MHBeautyManager *beautyManager;
// Transparent layer drawing face rectangles/landmarks above the video.
@property (nonatomic , strong) OverlayView *overlay;
@end

@implementation FUCameraPlatformView

/// Designated-style initializer: wires the beauty manager and camera
/// delegates to this object, creates the host GLView, then configures GL
/// and starts capture.
- (instancetype)initWithFrame:(CGRect)frame
               viewIdentifier:(int64_t)viewId
                    arguments:(id)args
               cameraInstance:(FUCamera *)camera
              binaryMessenger:(NSObject<FlutterBinaryMessenger> *)messenger
                beautyManager:(MHBeautyManager *)manager {
    if ((self = [super init])) {
        _beautyManager = manager;
        _beautyManager.delegate = self;

        _camera = camera;
        _camera.delegate = self;

        _view = [[GLView alloc] initWithFrame:frame];

        [self setupGL];
        [self setupCamera];
    }
    return self;
}

/// Creates the GL context, GLKView, CIContext, and overlay, and attaches
/// them to the host view.
///
/// BUG FIXES vs. the original:
/// 1. The context is made current IMMEDIATELY after creation — the original
///    called glCheckFramebufferStatus and created the CIContext with no
///    current context, which is a source of the "invalid context 0x0" logs.
/// 2. The premature glCheckFramebufferStatus call is removed: no framebuffer
///    is bound at this point, so the check was meaningless.
/// 3. The CAEAGLLayer cast is guarded — it is only valid if GLView overrides
///    +layerClass to return CAEAGLLayer; otherwise setDrawableProperties:
///    raises an unrecognized-selector exception.
- (void)setupGL {
    _glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
    if (!_glContext) {
        NSLog(@"Failed to create ES context");
        return;
    }
    if (![EAGLContext setCurrentContext:_glContext]) {
        NSLog(@"Failed to set current OpenGL context.");
        return;
    }

    // GLKView manages its own framebuffer/renderbuffer pair; frames are
    // drawn into it from the camera callback via the CIContext below.
    _glView = [[GLKView alloc] initWithFrame:_view.bounds context:_glContext];
    _glView.enableSetNeedsDisplay = NO; // redraws are driven by -display
    _glView.drawableDepthFormat = GLKViewDrawableDepthFormat24;
    _ciContext = [CIContext contextWithEAGLContext:_glContext];

    // NOTE(review): only meaningful if GLView's backing layer really is a
    // CAEAGLLayer (i.e. GLView overrides +layerClass) — verify in GLView.m.
    // If GLView uses a plain CALayer, the GLKView added below is what
    // actually renders, and this whole section can be deleted.
    if ([_view.layer isKindOfClass:[CAEAGLLayer class]]) {
        CAEAGLLayer *eaglLayer = (CAEAGLLayer *)_view.layer;
        eaglLayer.opaque = YES;
        eaglLayer.contentsScale = [UIScreen mainScreen].scale;
        eaglLayer.drawableProperties = @{
            kEAGLDrawablePropertyRetainedBacking: @NO,
            kEAGLDrawablePropertyColorFormat: kEAGLColorFormatRGBA8
        };
    }

    [_view addSubview:_glView];

    // Transparent overlay for face landmarks, above the video layer.
    _overlay = [[OverlayView alloc] initWithFrame:_view.bounds];
    _overlay.backgroundColor = [UIColor clearColor];
    [_view addSubview:_overlay];
}


/// Reveals the overlay if it was hidden, pushes the latest detection data
/// into it, and forces an immediate redraw/layout pass.
- (void)showFaceLandmarksAndFaceRectWithPersonsArray:(NSMutableArray *)arrPersons {
    OverlayView *overlay = self.overlay;
    if (overlay.hidden) {
        overlay.hidden = NO;
    }
    overlay.arrPersons = arrPersons;
    [overlay setNeedsDisplay];
    [overlay layoutIfNeeded];
}

/// Kicks off camera capture; frames arrive via didOutputVideoSampleBuffer:.
- (void)setupCamera {
    [_camera startCapture];
}

/// Camera delegate callback: applies a CIPhotoEffectChrome pass to the
/// frame, hands the pixel buffer to the beauty manager, and presents the
/// result through the GLKView.
///
/// BUG FIX: the original commented out the final
/// CVPixelBufferUnlockBaseAddress, so the success path leaked one lock per
/// frame. Every lock is now balanced by exactly one unlock.
- (void)didOutputVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    if (!sampleBuffer) {
        return;
    }
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (!imageBuffer) {
        return;
    }

    OSType formatType = CVPixelBufferGetPixelFormatType(imageBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    CIImage *image = [CIImage imageWithCVPixelBuffer:imageBuffer];
    // keysAndValues: already sets kCIInputImageKey; the original's extra
    // setValue:forKey: was redundant and is dropped.
    CIFilter *filter = image ? [CIFilter filterWithName:@"CIPhotoEffectChrome"
                                          keysAndValues:kCIInputImageKey, image, nil]
                             : nil;
    if (!filter) {
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
        return;
    }

    [_glView bindDrawable];

    // Letterbox a portrait 480x640 source into the drawable, centred
    // horizontally (assumes the camera delivers 480x640 — TODO confirm).
    CGFloat height = _glView.drawableHeight;
    CGFloat width = height / 640.f * 480.f;
    CGFloat x = (_glView.drawableWidth - width) / 2;

    if (_ciContext) {
        [_ciContext drawImage:filter.outputImage
                       inRect:CGRectMake(x, 0, width, height)
                     fromRect:CGRectMake(0, 0, 480, 640)];
    }

    if (self.beautyManager) {
        [self.beautyManager processWithPixelBuffer:imageBuffer formatType:formatType];
    }
    [_glView display];

    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
}


// - (void)didOutputVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer {
//     CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
//     OSType formatType = CVPixelBufferGetPixelFormatType(imageBuffer);
//     CVPixelBufferLockBaseAddress(imageBuffer, 0);
//     CIImage *image = [CIImage imageWithCVPixelBuffer:imageBuffer];
//     CIFilter *filter = [CIFilter filterWithName:@"CIPhotoEffectChrome" keysAndValues:kCIInputImageKey, image, nil];
//     [filter setValue:image forKey:kCIInputImageKey];
//     [_glView bindDrawable];
//     CGFloat width = _glView.drawableHeight/640.f*480.f;
//     CGFloat height = _glView.drawableHeight;
//     CGFloat x = (_glView.drawableWidth - width) /2;
//     [_ciContext drawImage:filter.outputImage inRect:CGRectMake(x, 0, width, height) fromRect:CGRectMake(0, 0, 480, 640)];
//     [self.beautyManager processWithPixelBuffer:imageBuffer formatType:formatType];
//     [_glView display];

// }

// FlutterPlatformView accessor: the GLView container (which holds the
// GLKView and the overlay as subviews) is what Flutter embeds.
- (UIView *)view {
    return _view;
}

@end
ios objective-c flutter opengl-es glkit
1 个回答（得票：0）

解决方案

显然 GLKView 本身就是一个 UIView，可以直接作为平台视图返回，而无需再把它嵌入自定义的 GLView（CAEAGLLayer）容器中。我不太擅长解释其中的原理，但按下面的方式修改后问题就解决了。

#import "FUCameraPlatformView.h"
#import "OverlayView.h"
#import "GLView.h" 
#import <GLKit/GLKit.h>

// Class extension: private state for the fixed version of the platform
// view — the GLKView itself is returned to Flutter, no wrapper container.
@interface FUCameraPlatformView()
// OpenGL ES 2 context shared by the GLKView and the CIContext.
@property (nonatomic, strong) EAGLContext *glContext;
// GL-backed view that is both renderer and the platform view itself.
@property (nonatomic, strong) GLKView *glView;
// Core Image context that draws directly into the GLKView's drawable.
@property (nonatomic, strong) CIContext *ciContext;
// Camera wrapper; this object is its delegate.
@property (nonatomic, strong) FUCamera *camera;
// Beauty-filter SDK session; processes raw pixel buffers in place.
@property (nonatomic, strong) MHBeautyManager *beautyManager;
// Transparent layer drawing face rectangles/landmarks above the video.
@property (nonatomic , strong) OverlayView *overlay;
// GL object handles deleted in -disposeAll (never generated in this class,
// so they stay 0 — deleting name 0 is a GL no-op).
@property (nonatomic, assign) GLuint framebuffer;
@property (nonatomic, assign) GLuint colorRenderbuffer;
// Serial queue created for capture work — NOTE(review): created but never
// used in the visible code; verify FUCamera is handed this queue elsewhere.
@property (nonatomic, strong) dispatch_queue_t captureQueue;
@end

@implementation FUCameraPlatformView

/// Initializes the platform view: GL context first (made current before any
/// GL/CI work), then the GLKView, CIContext, overlay, and camera wiring.
///
/// BUG FIX: removed `_containerView = [[UIView alloc] initWithFrame:frame];`
/// — no `containerView` property or ivar is declared on this class, so the
/// original line did not compile. The GLKView itself is the platform view.
- (instancetype)initWithFrame:(CGRect)frame
               viewIdentifier:(int64_t)viewId
                    arguments:(id)args
              binaryMessenger:(NSObject<FlutterBinaryMessenger> *)messenger
                beautyManager:(MHBeautyManager *)manager
               cameraInstance:(FUCamera *)camera {
    self = [super init];
    if (self) {
        // Create the ES2 context and make it current immediately, before
        // any other GL or Core Image calls.
        _glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
        if (!_glContext) {
            NSLog(@"Failed to create ES context");
            return nil;
        }
        if (![EAGLContext setCurrentContext:_glContext]) {
            NSLog(@"Failed to set current OpenGL context.");
            return nil;
        }

        // GLKView owns its framebuffer; the initializer already stores the
        // context, so the original's extra `_glView.context = _glContext;`
        // assignment was redundant and is dropped.
        _glView = [[GLKView alloc] initWithFrame:frame context:_glContext];

        // CIContext that renders directly into the GLKView's drawable.
        _ciContext = [CIContext contextWithEAGLContext:_glContext];

        // Transparent overlay for face landmarks, above the video.
        _overlay = [[OverlayView alloc] initWithFrame:_glView.bounds];
        _overlay.backgroundColor = [UIColor clearColor];
        [_glView addSubview:_overlay];

        // Camera + beauty-filter wiring.
        _camera = camera;
        _camera.delegate = self;
        _beautyManager = manager;
        _captureQueue = dispatch_queue_create("com.faceunity.videoCaptureQueue", DISPATCH_QUEUE_SERIAL);

        [self setupCamera];
    }
    return self;
}

/// Reveals the overlay if it was hidden, pushes the latest detection data
/// into it, and forces an immediate redraw/layout pass.
- (void)showFaceLandmarksAndFaceRectWithPersonsArray:(NSMutableArray *)arrPersons {
    OverlayView *overlay = self.overlay;
    if (overlay.hidden) {
        overlay.hidden = NO;
    }
    overlay.arrPersons = arrPersons;
    [overlay setNeedsDisplay];
    [overlay layoutIfNeeded];
}

/// Kicks off camera capture; frames arrive via didOutputVideoSampleBuffer:.
- (void)setupCamera {
    [self.camera startCapture];
}


/// Camera delegate callback: runs the beauty filter on the raw pixel buffer,
/// applies a CIPhotoEffectChrome pass, and presents via the GLKView.
///
/// BUG FIXES vs. the original: nil checks on the sample/image buffers, and
/// the CVPixelBufferLockBaseAddress is now balanced by an unlock — the
/// original locked the buffer every frame and never unlocked it.
- (void)didOutputVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    if (!sampleBuffer) {
        return;
    }
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (!imageBuffer) {
        return;
    }

    OSType formatType = CVPixelBufferGetPixelFormatType(imageBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    // Beauty processing mutates the pixel buffer in place before CI reads it.
    [self.beautyManager processWithPixelBuffer:imageBuffer formatType:formatType];

    CIImage *image = [CIImage imageWithCVPixelBuffer:imageBuffer];
    CIFilter *filter = [CIFilter filterWithName:@"CIPhotoEffectChrome"
                                  keysAndValues:kCIInputImageKey, image, nil];
    if (image && filter) {
        [_glView bindDrawable];

        // Letterbox a portrait 480x640 source into the drawable, centred
        // horizontally (assumes 480x640 camera output — TODO confirm).
        CGFloat height = _glView.drawableHeight;
        CGFloat width = height / 640.f * 480.f;
        CGFloat x = (_glView.drawableWidth - width) / 2;

        [_ciContext drawImage:filter.outputImage
                       inRect:CGRectMake(x, 0, width, height)
                     fromRect:CGRectMake(0, 0, 480, 640)];
        [_glView display];
    }

    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
}

// FlutterPlatformView accessor: the GLKView is returned directly (it is
// itself a UIView) — the change the answer credits with fixing the
// EAGLDrawable/renderbuffer binding problem.
- (UIView *)view {
    return _glView;
}

// Tears down capture and GL resources when the platform view is destroyed.
- (void)dealloc {
    [self disposeAll];
}

// Stops capture, releases the beauty SDK session, deletes GL objects, and
// clears delegate back-references so nothing dangles after teardown.
- (void)disposeAll {
    [_camera stopCapture];
    _camera.delegate = nil;
    _camera = nil;
    [_beautyManager releaseSession];
    _beautyManager = nil;

    // The context must be current for the deletes. _framebuffer and
    // _colorRenderbuffer are never generated in the visible code, so they
    // are 0 here — deleting name 0 is a silent no-op in GL, which is safe.
    [EAGLContext setCurrentContext:_glContext];
    glDeleteFramebuffers(1, &_framebuffer);
    glDeleteRenderbuffers(1, &_colorRenderbuffer);
    [EAGLContext setCurrentContext:nil];
    _glContext = nil;
}

@end
© www.soinside.com 2019 - 2024. All rights reserved.