coder
coder

Reputation: 411

How to implement band stop filter using AVAudioEngine

I am building an app that needs to perform analysis on the audio it receives from the microphone in real time. In my app, I also need to play a beep sound and start recording audio at the same time, in other words, I can't play the beep sound and then start recording. This introduces the problem of hearing the beep sound in my recording, (this might be because I am playing the beep sound through the speaker, but unfortunately I cannot compromise in this regard either). Since the beep sound is just a tone of about 2350 Hz, I was wondering how I could exclude that range of frequencies (say from 2300 Hz to 2400 Hz) in my recordings and prevent it from influencing my audio samples. After doing some googling I came up with what I think might be the solution, a band stop filter. According to Wikipedia: "a band-stop filter or band-rejection filter is a filter that passes most frequencies unaltered, but attenuates those in a specific range to very low levels". This seems like what I need to exclude frequencies from 2300 Hz to 2400 Hz in my recordings (or at least for the first second of the recording while the beep sound is playing). My question is: how would I implement this with AVAudioEngine? Is there a way I can turn off the filter after the first second of the recording when the beep sound is done playing without stopping the recording?

Since I am new to working with audio with AVAudioEngine (I've always just stuck to the higher levels of AVFoundation) I followed this tutorial to help me create a class to handle all the messy stuff. This is what my code looks like:

/// Records microphone audio to `recording.caf` while simultaneously playing a
/// bundled beep sound through the speaker.
///
/// Requires an AVAudioSession category that permits simultaneous playback and
/// recording (.playAndRecord), configured in `setupSession()`.
class Recorder {
  enum RecordingState {
    case recording, paused, stopped
  }

  private var engine: AVAudioEngine!
  private var mixerNode: AVAudioMixerNode!
  private var state: RecordingState = .stopped

  private var audioPlayer = AVAudioPlayerNode()
  // The beep is kept around so it can be re-scheduled for EVERY recording.
  // (A scheduled file plays only once; scheduling it a single time during
  // setup meant the beep was audible only on the first recording.)
  private var beepFile: AVAudioFile?

  init() {
    setupSession()
    setupEngine()
  }

  /// Configures the shared audio session for simultaneous playback + capture.
  fileprivate func setupSession() {
    let session = AVAudioSession.sharedInstance()
    // The original tutorial used .record; .playAndRecord is required here so
    // the beep can play while the microphone is live. .defaultToSpeaker routes
    // playback to the loudspeaker instead of the receiver.
    // NOTE(review): failures are deliberately ignored (best effort); surface
    // them if session setup errors must be reported to the user.
    try? session.setCategory(.playAndRecord, options: [.mixWithOthers, .defaultToSpeaker])
    try? session.setActive(true, options: .notifyOthersOnDeactivation)
  }

  /// Builds the engine graph: input -> mixer -> main mixer, player -> main mixer.
  fileprivate func setupEngine() {
    engine = AVAudioEngine()
    mixerNode = AVAudioMixerNode()

    // Set volume to 0 to avoid audio feedback while recording.
    mixerNode.volume = 0

    engine.attach(mixerNode)

    // Attach the audio player node that will play the beep.
    engine.attach(audioPlayer)

    makeConnections()

    // Prepare the engine in advance, so the system allocates resources early.
    engine.prepare()
  }

  /// Wires the node graph and loads (but does not schedule) the beep file.
  fileprivate func makeConnections() {
    let inputNode = engine.inputNode
    let inputFormat = inputNode.outputFormat(forBus: 0)
    engine.connect(inputNode, to: mixerNode, format: inputFormat)

    let mainMixerNode = engine.mainMixerNode
    // Mono Float32 at the hardware sample rate for the recording tap.
    let mixerFormat = AVAudioFormat(commonFormat: .pcmFormatFloat32, sampleRate: inputFormat.sampleRate, channels: 1, interleaved: false)
    engine.connect(mixerNode, to: mainMixerNode, format: mixerFormat)

    // Player -> main mixer; a nil format lets the engine negotiate it.
    engine.connect(audioPlayer, to: mainMixerNode, format: nil)

    // FIX: the original used Bundle.main.path(forResource: "beep.mp3", ofType: nil)!
    // with a force-unwrap and force-try, and scheduled the file once here.
    // Load it safely instead; scheduling happens per-recording in startRecording().
    if let url = Bundle.main.url(forResource: "beep", withExtension: "mp3") {
      beepFile = try? AVAudioFile(forReading: url)
    }
  }

  //MARK: Start Recording Function

  /// Starts the engine, installs a tap that writes the mic signal to disk,
  /// and plays the beep.
  /// - Throws: any error from creating the output file or starting the engine.
  func startRecording() throws {
    let tapNode: AVAudioNode = mixerNode
    let format = tapNode.outputFormat(forBus: 0)

    let documentURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]

    // AVAudioFile uses the Core Audio Format (CAF) to write to disk,
    // hence the .caf file extension.
    let file = try AVAudioFile(forWriting: documentURL.appendingPathComponent("recording.caf"), settings: format.settings)

    tapNode.installTap(onBus: 0, bufferSize: 4096, format: format) { buffer, _ in
      try? file.write(from: buffer)
      // Analysis hook: raw Float32 samples of the first channel.
      let floatArray = Array(UnsafeBufferPointer(start: buffer.floatChannelData![0], count: Int(buffer.frameLength)))
      _ = floatArray // feed this to the real-time analysis
    }

    try engine.start()

    // Re-schedule the beep on every recording; a scheduled file plays only once.
    if let beepFile = beepFile {
      audioPlayer.scheduleFile(beepFile, at: nil)
    }
    audioPlayer.play()
    state = .recording
  }

  //MARK: Other recording functions

  /// Resumes a paused recording (the tap is still installed).
  func resumeRecording() throws {
    try engine.start()
    state = .recording
  }

  /// Pauses the engine without tearing down the tap.
  func pauseRecording() {
    engine.pause()
    state = .paused
  }

  /// Removes the tap and stops the engine, finalizing the output file.
  func stopRecording() {
    // Remove existing taps on nodes.
    mixerNode.removeTap(onBus: 0)

    engine.stop()
    state = .stopped
  }
}

Upvotes: 1

Views: 554

Answers (2)

David Thery
David Thery

Reputation: 719

A slightly more complete answer, linked to an IBAction; in this example, I use .parametric for the filter type, with more bands than required, to give a broader insight on how to use it:

/// Plays a bundled song through a 4-band parametric EQ; band 2 notches out
/// the 2350 Hz beep. More bands than required are used to show the general
/// pattern of configuring AVAudioUnitEQ.
@IBAction func PlayWithEQ(_ sender: Any) {
    // Rebuild the engine from scratch so stale node connections don't linger.
    self.engine.stop()
    self.engine = AVAudioEngine()
    let player = AVAudioPlayerNode()
    // NOTE(review): force-unwrap/force-try are acceptable only because the
    // resource is bundled; a missing file is a programmer error.
    let url = Bundle.main.url(forResource: "yoursong", withExtension: "m4a")!
    let f = try! AVAudioFile(forReading: url)
    self.engine.attach(player)

    // Adding the EQ effect node: default every band to a flat parametric
    // filter (gain 0, bandwidth 1 octave), then override the exceptions.
    let effect = AVAudioUnitEQ(numberOfBands: 4)
    let frequencies: [Float] = [125, 250, 2350, 8000]
    for (band, frequency) in zip(effect.bands, frequencies) {
        band.frequency = frequency
        band.filterType = .parametric
        band.gain = 0.0
        band.bandwidth = 1
    }
    effect.bands[1].bandwidth = 0.5
    // Filter of interest: reject 2350 Hz (adjust bandwidth as needed).
    effect.bands[2].gain = -60.0

    self.engine.attach(effect)
    self.engine.connect(player, to: effect, format: f.processingFormat)
    let mixer = self.engine.mainMixerNode
    self.engine.connect(effect, to: mixer, format: f.processingFormat)
    // Stop the engine shortly after playback completes.
    // (`delay` is a helper defined elsewhere in this project.)
    player.scheduleFile(f, at: nil) {
        delay(0.05) {
            if self.engine.isRunning {
                self.engine.stop()
            }
        }
    }
    self.engine.prepare()
    try! self.engine.start()
    player.play()
}

Upvotes: 0

sbooth
sbooth

Reputation: 16966

AVAudioUnitEQ supports a band-stop filter.

Perhaps something like:

// Create an instance of AVAudioUnitEQ and connect it to the engine's main mixer.
// Assumes `engine` is an AVAudioEngine and `player` is an AVAudioPlayerNode
// already attached to it — both declared elsewhere.
let eq = AVAudioUnitEQ(numberOfBands: 1)
engine.attach(eq)
// Route: player -> eq -> main mixer. A nil format lets the engine negotiate it.
engine.connect(eq, to: engine.mainMixerNode, format: nil)
engine.connect(player, to: eq, format: nil)
// Notch out the 2350 Hz beep; .bandStop attenuates only this band and passes
// the rest of the spectrum unaltered. Ensure the band is not bypassed.
eq.bands[0].frequency = 2350
eq.bands[0].filterType = .bandStop
eq.bands[0].bypass = false

Upvotes: 2

Related Questions