I'm trying to make an app that displays the camera preview, then under certain conditions starts recording it with audio input, and finally replays the recorded movie.
I've already written the classes for previewing/recording/replaying and the controller that coordinates them.
These functions seem to work perfectly when called independently, but I can't get them to work together: when replaying the video, the sound plays but the image takes about five seconds to appear and then stutters.
Here's my code for this:
Previewing:
- (void) createSession
{
    _session = [[AVCaptureSession alloc] init];

    // Prefer the front camera; fall back to the default video device.
    AVCaptureDevice *device = [AVCaptureDevice deviceWithUniqueID:FRONT_CAMERA_ID];
    if (!device) device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    NSError *error = nil;
    _cVideoInput = [[AVCaptureDeviceInput deviceInputWithDevice:device error:&error] retain];
    if (!error) [_session addInput:_cVideoInput];

    device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    error = nil;
    _cAudioInput = [[AVCaptureDeviceInput deviceInputWithDevice:device error:&error] retain];
    if (!error) [_session addInput:_cAudioInput];

    _cameraLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_session];
    _cameraLayer.frame = self.bounds;
    [self.layer addSublayer:_cameraLayer];

    _videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    [_session setSessionPreset:AVCaptureSessionPreset640x480];
    [_videoOutput setVideoSettings:[NSDictionary dictionaryWithContentsOfFile:VIDEO_SETTINGS]];

    _audioOutput = [[AVCaptureAudioDataOutput alloc] init];

    // Both outputs deliver their sample buffers on the same serial queue.
    dispatch_queue_t queue = dispatch_queue_create(OUTPUT_QUEUE_NAME, NULL);
    [_videoOutput setSampleBufferDelegate:self queue:queue];
    [_session addOutput:_videoOutput];
    [_audioOutput setSampleBufferDelegate:self queue:queue];
    [_session addOutput:_audioOutput];
    dispatch_set_context(queue, self);
    dispatch_set_finalizer_f(queue, queue_finalizer);
    dispatch_release(queue);

    [_session startRunning];
}
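For reference, queue_finalizer is not shown here; a minimal sketch, assuming it only needs to run once the capture queue has fully drained (self is stored unretained via dispatch_set_context, so there is nothing to release):
static void queue_finalizer(void *context)
{
    // Runs after the last reference to the queue is released and all
    // pending blocks have finished; context is the pointer stored with
    // dispatch_set_context.
}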
- (void) deleteSession
{
    [_session stopRunning];
    [(AVCaptureVideoPreviewLayer *)_cameraLayer setSession:nil];
    [_cameraLayer removeFromSuperlayer];
    [_cameraLayer release];
    _cameraLayer = nil;
    [_audioOutput setSampleBufferDelegate:nil queue:NULL];
    [_videoOutput setSampleBufferDelegate:nil queue:NULL];
    [_audioOutput release];
    _audioOutput = nil;
    [_videoOutput release];
    _videoOutput = nil;
    [_cAudioInput release];
    _cAudioInput = nil;
    [_cVideoInput release];
    _cVideoInput = nil;
    NSArray *inputs = [_session inputs];
    for (AVCaptureInput *input in inputs)
        [_session removeInput:input];
    NSArray *outputs = [_session outputs];
    for (AVCaptureOutput *output in outputs)
        [_session removeOutput:output];
    [_session release];
    _session = nil;
}
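Both outputs share the same sample buffer delegate, so presumably a single callback routes buffers to the recording methods shown below. A minimal sketch of that glue, assuming the routing is done by comparing the output object:
- (void) captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    // Dispatch each buffer to the matching recording method (sketch only;
    // the actual dispatch code is not shown in the question).
    if (captureOutput == _videoOutput)
        [self RecordingVideoWithBuffer:sampleBuffer];
    else if (captureOutput == _audioOutput)
        [self RecordingAudioWithBuffer:sampleBuffer];
}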
Recording:
- (void) createWriter
{
    NSString *file = [self file];
    if ([[NSFileManager defaultManager] fileExistsAtPath:file])
        [[NSFileManager defaultManager] removeItemAtPath:file error:NULL];

    NSError *error = nil;
    _writer = [[AVAssetWriter assetWriterWithURL:[NSURL fileURLWithPath:file] fileType:AVFileTypeQuickTimeMovie error:&error] retain];
    if (error)
    {
        [_writer release];
        _writer = nil;
        NSLog(@"%@", error);
        return;
    }

    // Mono AAC audio at 44.1 kHz, 64 kbit/s.
    AudioChannelLayout acl;
    bzero(&acl, sizeof(acl));
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
    NSDictionary *settings = [NSDictionary dictionaryWithObjectsAndKeys:
                              [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                              [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                              [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                              [NSNumber numberWithInt:64000], AVEncoderBitRateKey,
                              [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
                              nil];
    _wAudioInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:settings] retain];
    [_writer addInput:_wAudioInput];

    // H.264 video matching the 640x480 capture preset.
    settings = [NSDictionary dictionaryWithObjectsAndKeys:
                AVVideoCodecH264, AVVideoCodecKey,
                [NSNumber numberWithInt:640], AVVideoWidthKey,
                [NSNumber numberWithInt:480], AVVideoHeightKey,
                nil];
    _wVideoInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:settings] retain];
    [_writer addInput:_wVideoInput];
}
- (void) deleteWriter
{
    [_wVideoInput release];
    _wVideoInput = nil;
    [_wAudioInput release];
    _wAudioInput = nil;
    [_writer release];
    _writer = nil;
}
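Nothing shown here finalizes the movie file; presumably the controller marks the inputs as finished and calls finishWriting before deleteWriter runs. A sketch of that missing step, under that assumption (stopRecording is a hypothetical name):
- (void) stopRecording
{
    // Finish the file before releasing the writer; otherwise the movie
    // may be truncated or unreadable.
    [_wAudioInput markAsFinished];
    [_wVideoInput markAsFinished];
    [_writer finishWriting];
    [self deleteWriter];
}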
- (void) RecordingAudioWithBuffer:(CMSampleBufferRef)sampleBuffer
{
    if (![self canRecordBuffer:sampleBuffer])
        return;
    if ([_wAudioInput isReadyForMoreMediaData])
        [_wAudioInput appendSampleBuffer:sampleBuffer];
}
- (void) RecordingVideoWithBuffer:(CMSampleBufferRef)sampleBuffer
{
    if (![self canRecordBuffer:sampleBuffer])
        return;
    if ([_wVideoInput isReadyForMoreMediaData])
        [_wVideoInput appendSampleBuffer:sampleBuffer];
}
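Both recording methods gate on canRecordBuffer:, which is not shown. A plausible sketch, assuming it is also responsible for starting the writer session at the first buffer's presentation timestamp (_isRecording is the flag the answer below refers to):
- (BOOL) canRecordBuffer:(CMSampleBufferRef)sampleBuffer
{
    if (!_isRecording || !_writer)
        return NO;
    // Start the writer session at the first buffer's presentation time so
    // audio and video share a common timeline.
    if (_writer.status == AVAssetWriterStatusUnknown)
    {
        [_writer startWriting];
        [_writer startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
    }
    return _writer.status == AVAssetWriterStatusWriting;
}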
Playing:
- (void) observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
    AVPlayerItem *item = (AVPlayerItem *)object;
    [item removeObserver:self forKeyPath:@"status"];
    switch (item.status)
    {
        case AVPlayerItemStatusReadyToPlay:
            [_player seekToTime:kCMTimeZero];
            [_player play];
            [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(finishPlaying:) name:AVPlayerItemDidPlayToEndTimeNotification object:item];
            break;
        case AVPlayerItemStatusUnknown:
        case AVPlayerItemStatusFailed:
            break;
        default:
            break;
    }
}
- (void) finishPlaying:(NSNotification *)notification
{
    [_player pause];
    [_playerLayer removeFromSuperlayer];
    [_playerLayer release];
    _playerLayer = nil;
    [_player release];
    _player = nil;
    [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemDidPlayToEndTimeNotification object:nil];
}
- (void) play:(NSString *)path
{
    _player = [[AVPlayer playerWithURL:[NSURL fileURLWithPath:path]] retain];
    _playerLayer = [[AVPlayerLayer playerLayerWithPlayer:_player] retain];
    // Rotate the layer 90 degrees and mirror it vertically so the recorded
    // footage appears upright.
    _playerLayer.transform = CATransform3DScale(CATransform3DMakeRotation(M_PI_2, 0, 0, 1), 1, -1, 1);
    _playerLayer.frame = self.bounds;
    [self.layer addSublayer:_playerLayer];
    // Wait for the item to become ready before starting playback.
    [_player.currentItem addObserver:self forKeyPath:@"status" options:0 context:NULL];
}
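The controller glue is not shown either; presumably it stops recording and tears down the capture session before replaying, roughly along these lines (replayRecording and stopRecording are hypothetical names):
- (void) replayRecording
{
    [self stopRecording];      // finalize the movie file
    [self deleteSession];      // stop the capture session so preview and
                               // playback don't compete for resources
    [self play:[self file]];
}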
I was having a similar issue, and while this didn't fix it completely, it did help quite a bit: instead of just dropping the sample buffers when the writer isn't ready, wait a little and then re-evaluate whether the data can be written.
- (void) RecordingVideoWithBuffer:(CMSampleBufferRef)sampleBuffer
{
    if (![self canRecordBuffer:sampleBuffer])
        return;
    if (!_wVideoInput.readyForMoreMediaData && _isRecording)
    {
        // Instead of dropping the frame, retry shortly; the writer input
        // may simply not be ready yet.
        [self performSelector:@selector(RecordingVideoWithBuffer:) withObject:(__bridge id)(sampleBuffer) afterDelay:0.05];
        return;
    }
    [_wVideoInput appendSampleBuffer:sampleBuffer];
}
If you aren't using ARC, just passing sampleBuffer should work, but ARC requires the __bridge cast.
EDIT: I used performSelector and return rather than a while loop with an NSThread sleep because it's non-blocking. Blocking sucks.