Reputation: 654
I have this code in Swift 3, and my output is 0.0 most of the time; only rarely do I see very small numbers, on the order of 1e-50.
The fileURL points to a recording (.caf) that contains sound.
Does anyone know what's up?
func readBuff(_ fileURL: CFURL) {
    var fileRef: ExtAudioFileRef? = nil
    let openStatus = ExtAudioFileOpenURL(fileURL, &fileRef)
    guard openStatus == noErr else {
        print("Failed to open audio file '\(fileURL)' with error \(openStatus)")
        return
    }

    // Client data format: 44.1 kHz, mono, 32-bit float linear PCM
    var audioFormat2 = AudioStreamBasicDescription()
    audioFormat2.mSampleRate = 44100 // GIVE YOUR SAMPLING RATE
    audioFormat2.mFormatID = kAudioFormatLinearPCM
    audioFormat2.mFormatFlags = kLinearPCMFormatFlagIsFloat
    audioFormat2.mBitsPerChannel = UInt32(MemoryLayout<Float32>.size) * 8
    audioFormat2.mChannelsPerFrame = 1 // mono
    audioFormat2.mBytesPerFrame = audioFormat2.mChannelsPerFrame * UInt32(MemoryLayout<Float32>.size) // == sizeof(Float32)
    audioFormat2.mFramesPerPacket = 1
    audioFormat2.mBytesPerPacket = audioFormat2.mFramesPerPacket * audioFormat2.mBytesPerFrame // == sizeof(Float32)

    // apply audioFormat2 to the extended audio file
    ExtAudioFileSetProperty(fileRef!,
                            kExtAudioFileProperty_ClientDataFormat,
                            UInt32(MemoryLayout<AudioStreamBasicDescription>.size),
                            &audioFormat2)

    let numSamples = 1024 // how many samples to read in at a time
    let sizePerPacket: UInt32 = audioFormat2.mBytesPerPacket // sizeof(Float32) = 4 bytes
    let packetsPerBuffer: UInt32 = UInt32(numSamples)
    let outputBufferSize: UInt32 = packetsPerBuffer * sizePerPacket // 4096

    // outputbuffer points to the memory reserved for the read
    let outputbuffer = UnsafeMutablePointer<UInt8>.allocate(capacity: MemoryLayout<UInt8>.size * Int(outputBufferSize))

    var convertedData = AudioBufferList()
    convertedData.mNumberBuffers = 1 // set this for mono
    convertedData.mBuffers.mNumberChannels = audioFormat2.mChannelsPerFrame // also = 1
    convertedData.mBuffers.mDataByteSize = outputBufferSize
    convertedData.mBuffers.mData = UnsafeMutableRawPointer(outputbuffer)

    var frameCount: UInt32 = UInt32(numSamples)
    while frameCount > 0 {
        Utility.check(ExtAudioFileRead(fileRef!,
                                       &frameCount,
                                       &convertedData),
                      operation: "Couldn't read from input file")
        if frameCount == 0 {
            Swift.print("done reading from file")
            return
        }

        var arrayFloats: [Float] = []
        let ptr = convertedData.mBuffers.mData?.assumingMemoryBound(to: Float.self)
        var j = 0
        var floatDataArray: [Double] = [882000] // SPECIFY YOUR DATA LIMIT; MINE WAS 882000 (should be equal to or more than the data limit)
        if frameCount > 0 {
            var audioBuffer: AudioBuffer = convertedData.mBuffers
            let floatArr = UnsafeBufferPointer(start: audioBuffer.mData?.assumingMemoryBound(to: Float.self), count: 882000)
            for i in 0...1024 {
                // floatDataArray[j] = Double(floatArr[i]) // put your data into the float array
                // print("\(floatDataArray[j])")
                floatDataArray.append(Double(floatArr[i]))
                print(Float((ptr?[i])!))
                j += 1
            }
            // print(floatDataArray)
        }
    }
}
I'm reading from a fileURL created like this:
guard let fileURL = CFURLCreateWithFileSystemPath(kCFAllocatorDefault,
                                                  "./output.caf" as CFString!,
                                                  .cfurlposixPathStyle,
                                                  false) else {
    // unable to create the file URL
    exit(-1)
}
Steps after recording:

Swift.print("Recording, press <return> to stop:\n")

// wait for a key to be pressed
getchar()

// end recording
Swift.print("* recording done *\n")
recorder.running = false

// stop the queue
Utility.check(AudioQueueStop(queue!, true),
              operation: "AudioQueueStop failed")

// a codec may update its magic cookie at the end of an encoding session,
// so reapply it to the file now
Utility.applyEncoderCookie(fromQueue: queue!, toFile: recorder.recordFile!)

// cleanup
AudioQueueDispose(queue!, true)
AudioFileClose(recorder.recordFile!)

readBuff(fileURL)
Upvotes: 1
Views: 647
Reputation: 36072
You're setting up your ExtAudioFile and its client format, but you're not actually reading from it (with ExtAudioFileRead), so your "output" is actually uninitialised, and in your case, very small.
Upvotes: 2