Reputation: 1
My code works well for one image file. But if there are more images ,images are the screen captured and png formatted images, I face this error. Video writing failed: Error Domain=AVFoundationErrorDomain Code=-11800 "The operation could not be completed" UserInfo={NSLocalizedFailureReason=An unknown error occurred (-16364), NSLocalizedDescription=The operation could not be completed, NSUnderlyingError=0x600000c78150 {Error Domain=NSOSStatusErrorDomain Code=-16364 "(null)"}} How to fix it?
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import <AppKit/AppKit.h>
#import <CoreImage/CoreImage.h> // Import CoreImage framework explicitly
@interface VideoCreator : NSObject
- (void)createH264VideoFromPNGImages;
@end
@implementation VideoCreator
- (void)createH264VideoFromPNGImages {
// Specify the folder path containing the PNG images
NSString *imageFolderPath = @"/var/folders/example/path/";
// Specify the output file path and name for the H.264 video
NSString *outputPath = @"/var/folders/example/result.mp4";
// Get a list of files in the image folder
NSArray *imageFiles = [[NSFileManager defaultManager] contentsOfDirectoryAtPath:imageFolderPath error:nil];
// Create an AVAssetWriter to write the video file
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:outputPath]
fileType:AVFileTypeMPEG4
error:&error];
if (error) {
NSLog(@"Failed to create AVAssetWriter: %@", error);
return;
}
// Define the video settings
NSDictionary *videoSettings = @{
AVVideoCodecKey: AVVideoCodecTypeH264,
AVVideoWidthKey: @2880, // Specify the desired video width
AVVideoHeightKey: @1800, // Specify the desired video height
};
// Create an AVAssetWriterInput with the video settings
AVAssetWriterInput *videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
videoWriterInput.expectsMediaDataInRealTime = YES;
// Create a pixel buffer attributes dictionary
NSDictionary *pixelBufferAttributes = @{
(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32ARGB),
(id)kCVPixelBufferWidthKey: @2880, // Specify the same width as video settings
(id)kCVPixelBufferHeightKey: @1800, // Specify the same height as video settings
};
// Create an AVAssetWriterInputPixelBufferAdaptor with the pixel buffer attributes
AVAssetWriterInputPixelBufferAdaptor *pixelBufferAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput sourcePixelBufferAttributes:pixelBufferAttributes];
// Add the video input to the asset writer
[videoWriter addInput:videoWriterInput];
// Start writing the video
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
// Iterate through the image files and append them to the video
for (NSString *imageFile in imageFiles) {
NSString *imageFilePath = [imageFolderPath stringByAppendingPathComponent:imageFile];
NSImage *image = [[NSImage alloc] initWithContentsOfFile:imageFilePath];
CIImage *ciImage = [[CIImage alloc] initWithData:[image TIFFRepresentation]];
CVPixelBufferRef pixelBuffer = [self pixelBufferFromCIImage:ciImage];
// Append the pixel buffer to the asset writer input
[pixelBufferAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:CMTimeMake(1, 30)];
// Release the pixel buffer
CVPixelBufferRelease(pixelBuffer);
}
// Finish writing the video
[videoWriterInput markAsFinished];
[videoWriter endSessionAtSourceTime:CMTimeMake(imageFiles.count, 30)];
[videoWriter finishWritingWithCompletionHandler:^{
// Handle the completion of video writing
if (videoWriter.status == AVAssetWriterStatusCompleted) {
NSLog(@"Video writing completed successfully!");
} else if (videoWriter.status == AVAssetWriterStatusFailed) {
NSLog(@"Video writing failed: %@", videoWriter.error);
} else if (videoWriter.status == AVAssetWriterStatusCancelled) {
NSLog(@"Video writing cancelled.");
} else {
NSLog(@"Video writing encountered an unknown error.");
}
}];
}
- (CVPixelBufferRef)pixelBufferFromCIImage:(CIImage *)image {
// Create a pixel buffer pool
CVPixelBufferPoolRef pixelBufferPool;
NSDictionary *pixelBufferAttributes = @{
(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32ARGB),
(id)kCVPixelBufferWidthKey: @2880, // Specify the desired width
(id)kCVPixelBufferHeightKey: @1800, // Specify the desired height
(id)kCVPixelBufferCGImageCompatibilityKey: @YES,
(id)kCVPixelBufferCGBitmapContextCompatibilityKey: @YES
};
CVPixelBufferPoolCreate(nil, nil, (__bridge CFDictionaryRef)pixelBufferAttributes, &pixelBufferPool);
// Create a pixel buffer
CVPixelBufferRef pixelBuffer;
CVPixelBufferPoolCreatePixelBuffer(nil, pixelBufferPool, &pixelBuffer);
// Lock the pixel buffer
CVPixelBufferLockBaseAddress(pixelBuffer, 0);
// Get the pixel buffer data
void *pixelData = CVPixelBufferGetBaseAddress(pixelBuffer);
// Create a Core Graphics context
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
size_t width = CVPixelBufferGetWidth(pixelBuffer);
size_t height = CVPixelBufferGetHeight(pixelBuffer);
CGContextRef context = CGBitmapContextCreate(pixelData, width, height, 8, bytesPerRow, CGColorSpaceCreateDeviceRGB(), kCGImageAlphaNoneSkipFirst);
// Render the CIImage into the pixel buffer
NSGraphicsContext *nsGraphicsContext = [NSGraphicsContext graphicsContextWithCGContext:context flipped:NO];
[NSGraphicsContext setCurrentContext:nsGraphicsContext];
CIContext *ciContext = [CIContext contextWithCGContext:context options:nil];
[ciContext drawImage:image inRect:CGRectMake(0, 0, width, height) fromRect:[image extent]];
// Unlock the pixel buffer and release the Core Graphics context
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
CGContextRelease(context);
return pixelBuffer;
}
@end
int main(int argc, const char * argv[]) {
@autoreleasepool {
VideoCreator *videoCreator = [[VideoCreator alloc] init];
[videoCreator createH264VideoFromPNGImages];
}
return 0;
}
After convert one image to mp4 file, I 'd checked the mp4 file with QuickTime Player. If there are only even 2 images , code does not work.
Upvotes: 0
Views: 139
Reputation: 36
Don't create a new pixelBufferPool in pixelBufferFromCIImage function. Let's try using pixelBufferPool in pixelBufferAdaptor which you created in createH264VideoFromPNGImages function.
Beside that, why don't you use Core Image to convert CIImage to CVPixelBuffer by CIContext directly? I think using Core graphics to convert is bad performnace
https://developer.apple.com/documentation/coreimage/cicontext/1437853-render
Upvotes: 1