Reputation: 35
Currently I'm building a Siri-like assistant that sends audio data over a WebSocket connection. Unfortunately, every time I switch between using AirPods and not using them, the simulator crashes with:
Thread 1: "required condition is false: format.sampleRate == hwFormat.sampleRate"
I tried playing with the format (e.g. swapping commonFormat: .pcmFormatFloat32 for other common formats) and with the bus the tap is installed on, but I still couldn't find the root cause of the crash. Any ideas? Thanks.
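For context, this is roughly the kind of variation I tried (the hardcoded 44100 rate and the forcedFormat name are just illustrative, not my exact code):

// One variation: forcing an explicit format on the tap instead of asking
// the node for its current output format. It still crashed after a route change.
let forcedFormat = AVAudioFormat(commonFormat: .pcmFormatFloat32,
                                 sampleRate: 44100,
                                 channels: 1,
                                 interleaved: false)
mixerNode.installTap(onBus: 0, bufferSize: 4096, format: forcedFormat) { buffer, _ in
    // same buffer handling as in startRecording() below
}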
Here's my code
import AVFoundation
import Combine

final class AudioRecorder: Recorder, ObservableObject {
    enum RecordingState {
        case recording, paused, stopped
    }

    private var engine: AVAudioEngine!
    private var mixerNode: AVAudioMixerNode!

    @Published private(set) var state: RecordingState = .stopped

    private let audioBus = 0
    private let bufferSize: AVAudioFrameCount = 1024
    private let sampleRate = 44100.0

    private var audioBuffers = [AVAudioPCMBuffer]()
    @Published private(set) var combinedBuffer: AudioRecordResult?
    func createEngine() {
        setupSession()
        setupEngine()
    }

    fileprivate func setupSession() {
        let session = AVAudioSession.sharedInstance()
        try? session.setCategory(.record)
        try? session.setActive(true, options: .notifyOthersOnDeactivation)
    }
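    // (No preferred sample rate is set on the session; it just follows
    // whatever the current input route reports.)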
    fileprivate func setupEngine() {
        engine = AVAudioEngine()
        mixerNode = AVAudioMixerNode()
        mixerNode.volume = 0 // mute the mixer so the mic input isn't played back
        engine.attach(mixerNode)

        makeConnections()
        engine.prepare()
    }
    fileprivate func makeConnections() {
        let inputNode = engine.inputNode
        let inputFormat: AVAudioFormat = inputNode.outputFormat(forBus: 0)
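        // Note: this format is captured once, when the connections are made.
        // Switching between AirPods and the built-in mic changes the hardware
        // sample rate, so I suspect the graph keeps running with a stale rate
        // (which would match the "format.sampleRate == hwFormat.sampleRate" text).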
        engine.connect(inputNode, to: mixerNode, format: inputFormat)

        let mainMixerNode = engine.mainMixerNode
        let mixerFormat = AVAudioFormat(commonFormat: .pcmFormatFloat32, sampleRate: inputFormat.sampleRate, channels: 1, interleaved: false)
        engine.connect(mixerNode, to: mainMixerNode, format: mixerFormat)
    }
    func startRecording() throws {
        let tapNode: AVAudioNode = mixerNode
        let format = tapNode.outputFormat(forBus: 0)
        let startTime = DispatchTime.now()

        // Install the tap before starting the engine.
        tapNode.installTap(onBus: 0, bufferSize: 4096, format: format) { [weak self] buffer, _ in
            guard let self = self else { return }

            let elapsedTime = DispatchTime.now().uptimeNanoseconds - startTime.uptimeNanoseconds
            let isFinal = elapsedTime > 5 * 1_000_000_000 // stop after ~5 seconds

            // The tap block runs on a realtime audio thread; hop to the main
            // queue before mutating @Published state.
            DispatchQueue.main.async {
                self.audioBuffers.append(buffer)
                self.combineBuffers(isFinal: isFinal)
                if isFinal {
                    self.stopRecording()
                }
            }
        }

        try engine.start()
        state = .recording
    }
    private func combineBuffers(isFinal: Bool) {
        guard !audioBuffers.isEmpty else { return }

        let format = audioBuffers[0].format
        let totalFrameLength = audioBuffers.reduce(0) { $0 + $1.frameLength }
        guard let combinedBuffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: totalFrameLength) else { return }
        combinedBuffer.frameLength = totalFrameLength

        // Copy each captured buffer, channel by channel, into one big buffer.
        var frameOffset: AVAudioFrameCount = 0
        for buffer in audioBuffers {
            for channel in 0..<Int(buffer.format.channelCount) {
                let src = buffer.floatChannelData![channel]
                let dst = combinedBuffer.floatChannelData![channel] + Int(frameOffset)
                memcpy(dst, src, Int(buffer.frameLength) * MemoryLayout<Float>.size)
            }
            frameOffset += buffer.frameLength
        }

        self.combinedBuffer = .success(buffer: combinedBuffer, final: isFinal)
    }
    func stopRecording() {
        mixerNode.removeTap(onBus: 0)
        engine.stop()

        if case .success(let buffer, _) = self.combinedBuffer {
            self.combinedBuffer = .success(buffer: buffer, final: true)
        }

        self.audioBuffers = []
        state = .stopped
    }
}
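In case it matters, this is roughly how I drive the recorder (simplified; the call sequence is illustrative, not my exact view code):

let recorder = AudioRecorder()
recorder.createEngine()
try? recorder.startRecording()
// ...later, e.g. when the user taps stop:
recorder.stopRecording()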
Upvotes: 1
Views: 32