Reputation: 417
I am trying to develop an app that analyses an audio stream from the microphone, using Core Audio and AVAudioSession on SDK 8.1. Using bits and pieces from Apple's docs as well as helpful bloggers, I've managed to put together a program which successfully sets up a RemoteIO audio unit.
But the program fails to start the recording: the AudioOutputUnitStart function fails with return code -500. Why is that? [The following code functions as is if it is saved as ViewController.m and used in a single-view Xcode template. While it is not fully minimal, I tried to keep it as small as possible.]
#import "ViewController.h"
@import AVFoundation;
@import AudioUnit;
#define kInputBus 1
AudioComponentInstance *audioUnit = NULL;
float *convertedSampleBuffer = NULL;
int status = 0;
static OSStatus recordingCallback(void *inRefCon,
AudioUnitRenderActionFlags *ioActionFlags,
const AudioTimeStamp *inTimeStamp,
UInt32 inBusNumber,
UInt32 inNumberFrames,
AudioBufferList *ioData) {
AudioBufferList *bufferList;
OSStatus status;
status = AudioUnitRender(*audioUnit,
ioActionFlags,
inTimeStamp,
inBusNumber,
inNumberFrames,
bufferList);
printf("%d", status); printf("%s", " is the return code of AudioUnitRender from the recordingCallback.\n");
// DoStuffWithTheRecordedAudio(bufferList);
return noErr;
}
int myAudio() {
    AVAudioSession *mySession = [AVAudioSession sharedInstance];
    [mySession setCategory:AVAudioSessionCategoryRecord error:nil];
    [mySession setMode:AVAudioSessionModeMeasurement error:nil];
    [mySession setPreferredSampleRate:44100 error:nil];
    [mySession setPreferredIOBufferDuration:0.02 error:nil];
    [mySession setActive:YES error:nil];

    audioUnit = (AudioUnit *)malloc(sizeof(AudioUnit));

    AudioComponentDescription desc;
    desc.componentType = kAudioUnitType_Output;
    desc.componentSubType = kAudioUnitSubType_RemoteIO;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;
    desc.componentFlags = 0;
    desc.componentFlagsMask = 0;

    AudioComponent comp = AudioComponentFindNext(NULL, &desc);
    status = AudioComponentInstanceNew(comp, audioUnit);
    printf("%d is the return code of instantiating a new audio component instance.\n", status);

    UInt32 enable = 1;
    status = AudioUnitSetProperty(*audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, kInputBus, &enable, sizeof(enable));
    printf("%d is the return code of enabling IO on the audio unit.\n", status);

    AudioStreamBasicDescription streamDescription = {0};
    streamDescription.mSampleRate = 44100;
    streamDescription.mFormatID = kAudioFormatLinearPCM;
    streamDescription.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
    streamDescription.mFramesPerPacket = 1;
    streamDescription.mChannelsPerFrame = 1;
    streamDescription.mBitsPerChannel = 16;
    streamDescription.mBytesPerPacket = 2;
    streamDescription.mBytesPerFrame = 2;

    status = AudioUnitSetProperty(*audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &streamDescription, sizeof(streamDescription));
    printf("%d is the return code of setting the AudioStreamBasicDescription.\n", status);

    AURenderCallbackStruct callbackStruct;
    callbackStruct.inputProc = recordingCallback;
    callbackStruct.inputProcRefCon = NULL;
    status = AudioUnitSetProperty(*audioUnit,
                                  kAudioOutputUnitProperty_SetInputCallback,
                                  kAudioUnitScope_Global,
                                  kInputBus,
                                  &callbackStruct,
                                  sizeof(callbackStruct));
    printf("%d is the return code of setting the recording callback on the audio unit.\n", status);

    status = AudioUnitInitialize(*audioUnit);
    printf("%d is the return code of initializing the audio unit.\n", status);

    status = AudioOutputUnitStart(*audioUnit);
    printf("%d is the return code of starting the audio unit.\n", status);

    return noErr;
}
@interface ViewController ()
@end

@implementation ViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    // Do any additional setup after loading the view, typically from a nib.
    myAudio();
    [NSThread sleepForTimeInterval:1];
    exit(0);
}

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

@end
All printf statements return 0, except the last one, which returns -500.
Upvotes: 1
Views: 2457
Reputation: 417
There are two errors in the code provided in the question:

1. The stream format is applied to the wrong scope and element. For recording, it must be set on the output scope of the input bus (kAudioUnitScope_Output, element kInputBus), not on the input scope of element 0.
2. The audio session is configured and activated before the audio unit is set up. A session is not necessary at all, but if you use one, it must be activated after the audio unit has been set up.
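The key change for the first error, excerpted from the corrected listing below:

    // Apply the recording format on the OUTPUT scope of the INPUT bus (element 1):
    // this is the side of the input element that delivers data to the callback.
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioUnitProperty_StreamFormat,
                                  kAudioUnitScope_Output,
                                  kInputBus,
                                  &audioFormat,
                                  sizeof(audioFormat));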
Here is a complete ViewController.m that is capable of recording, and can play back as well if the commented blocks are activated. (If you aim for playback and activate the audio session block, change the category from Record to PlayAndRecord.) Credits to Michael Tyson for publishing the base on which I built.
#import "ViewController.h"
@interface ViewController ()
@end
@implementation ViewController
@import AVFoundation;
@import AudioUnit;
#define kOutputBus 0
#define kInputBus 1
static OSStatus recordingCallback(void *inRefCon,
                                  AudioUnitRenderActionFlags *ioActionFlags,
                                  const AudioTimeStamp *inTimeStamp,
                                  UInt32 inBusNumber,
                                  UInt32 inNumberFrames,
                                  AudioBufferList *ioData) {
    // This works, but can be optimised by moving the allocation out of the callback.
    AudioBufferList *bufferList;
    bufferList = (AudioBufferList *)malloc(sizeof(AudioBufferList) + sizeof(AudioBuffer));
    bufferList->mNumberBuffers = 1;
    bufferList->mBuffers[0].mNumberChannels = 1;
    bufferList->mBuffers[0].mDataByteSize = 1024 * 2;
    bufferList->mBuffers[0].mData = calloc(1024, 2);

    OSStatus status;
    status = AudioUnitRender((AudioUnit)inRefCon, // the audio unit was passed in as the ref con
                             ioActionFlags,
                             inTimeStamp,
                             inBusNumber,
                             inNumberFrames,
                             bufferList);
    if (status != noErr) {
        NSLog(@"Error %d", (int)status);
    } else {
        NSLog(@"No Errors!");
        printf("%d, ", (int)*((SInt16 *)bufferList->mBuffers[0].mData));
    }

    // Now, we have the samples we just read sitting in buffers in bufferList
    // DoStuffWithTheRecordedAudio(bufferList);

    // Free the per-callback buffers again (or, better, reuse a preallocated one).
    free(bufferList->mBuffers[0].mData);
    free(bufferList);
    return noErr;
}
//static OSStatus playbackCallback(void *inRefCon,
//                                 AudioUnitRenderActionFlags *ioActionFlags,
//                                 const AudioTimeStamp *inTimeStamp,
//                                 UInt32 inBusNumber,
//                                 UInt32 inNumberFrames,
//                                 AudioBufferList *ioData) {
//    // Notes: ioData contains buffers (may be more than one!)
//    // Fill them up as much as you can. Remember to set the size value in each buffer to match how
//    // much data is in the buffer.
//    return noErr;
//}
- (void)myAudio {
    OSStatus status;
    AudioComponentInstance audioUnit;

    // Describe audio component
    AudioComponentDescription desc;
    desc.componentType = kAudioUnitType_Output;
    desc.componentSubType = kAudioUnitSubType_RemoteIO;
    desc.componentFlags = 0;
    desc.componentFlagsMask = 0;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;

    // Get component
    AudioComponent inputComponent = AudioComponentFindNext(NULL, &desc);

    // Get audio unit
    status = AudioComponentInstanceNew(inputComponent, &audioUnit);

    // Enable IO for recording
    UInt32 flag = 1;
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioOutputUnitProperty_EnableIO,
                                  kAudioUnitScope_Input,
                                  kInputBus,
                                  &flag,
                                  sizeof(flag));

    //// Enable IO for playback
    //status = AudioUnitSetProperty(audioUnit,
    //                              kAudioOutputUnitProperty_EnableIO,
    //                              kAudioUnitScope_Output,
    //                              kOutputBus,
    //                              &flag,
    //                              sizeof(flag));

    // Describe format
    AudioStreamBasicDescription audioFormat;
    audioFormat.mSampleRate = 44100.00;
    audioFormat.mFormatID = kAudioFormatLinearPCM;
    audioFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
    audioFormat.mFramesPerPacket = 1;
    audioFormat.mChannelsPerFrame = 1;
    audioFormat.mBitsPerChannel = 16;
    audioFormat.mBytesPerPacket = 2;
    audioFormat.mBytesPerFrame = 2;

    // Apply format on the output scope of the input bus (this was error 1)
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioUnitProperty_StreamFormat,
                                  kAudioUnitScope_Output,
                                  kInputBus,
                                  &audioFormat,
                                  sizeof(audioFormat));
    //status = AudioUnitSetProperty(audioUnit,
    //                              kAudioUnitProperty_StreamFormat,
    //                              kAudioUnitScope_Input,
    //                              kOutputBus,
    //                              &audioFormat,
    //                              sizeof(audioFormat));

    // Set input callback
    AURenderCallbackStruct callbackStruct;
    callbackStruct.inputProc = recordingCallback;
    callbackStruct.inputProcRefCon = audioUnit;
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioOutputUnitProperty_SetInputCallback,
                                  kAudioUnitScope_Global,
                                  kInputBus,
                                  &callbackStruct,
                                  sizeof(callbackStruct));

    //// Set output callback
    //callbackStruct.inputProc = playbackCallback;
    //callbackStruct.inputProcRefCon = audioUnit;
    //status = AudioUnitSetProperty(audioUnit,
    //                              kAudioUnitProperty_SetRenderCallback,
    //                              kAudioUnitScope_Global,
    //                              kOutputBus,
    //                              &callbackStruct,
    //                              sizeof(callbackStruct));
    // Initialise
    status = AudioUnitInitialize(audioUnit);

    // Start
    status = AudioOutputUnitStart(audioUnit);
    NSLog(@"Starting returned code %d", (int)status);

    // It is not necessary to have a session, but if you have one, it must come
    // after the setup of the audio unit (this was error 2).
    //NSError *error = nil;
    //
    //// Configure & activate audio session
    //AVAudioSession *session = [AVAudioSession sharedInstance];
    //
    //if (![session setCategory:AVAudioSessionCategoryRecord error:&error]) NSLog(@"Error configuring session category: %@", error);
    //if (![session setMode:AVAudioSessionModeMeasurement error:&error]) NSLog(@"Error configuring session mode: %@", error);
    //if (![session setActive:YES error:&error]) NSLog(@"Error activating audio session: %@", error);
    //
    //NSLog(@"Session activated. sample rate %f", session.sampleRate);
    //NSLog(@"Number of channels %ld", (long)session.inputNumberOfChannels);
}
- (void)viewDidLoad {
    [super viewDidLoad];
    // Do any additional setup after loading the view, typically from a nib.
    [self myAudio];
    [NSThread sleepForTimeInterval:1];
    exit(0);
}

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

@end
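The sleepForTimeInterval/exit(0) pair in viewDidLoad only keeps the demo alive for one second of recording. In a real app you would keep the unit running and tear it down explicitly when done. A minimal teardown sketch, assuming audioUnit is kept in an instance variable instead of a local (the method name stopMyAudio is just for illustration):

    - (void)stopMyAudio {
        AudioOutputUnitStop(audioUnit);            // stop pulling input
        AudioUnitUninitialize(audioUnit);          // undo AudioUnitInitialize
        AudioComponentInstanceDispose(audioUnit);  // release the instance
        audioUnit = NULL;
    }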
Upvotes: 1