The Nomad

Reputation: 7425

Swift: use of 'self' in method call before super.init initializes self compile error

I made a custom class that handles audio recording/playback and declared a protocol in the same file. I implemented the protocol in a UIViewController subclass and called my AudioHelper's setDelegate method.

I am getting a compile error related to my init(), and I'm not exactly sure how to get rid of it:

use of 'self' in method call 'setupAudioSession' before super.init initializes self

override init() {
        setupAudioSession()
        super.init()
    }

How do I resolve this error? And why do I have to override init()?

My AudioHelper class

import Foundation
import AVFoundation

class AudioHelper: NSObject, AVAudioRecorderDelegate {

    var audioSession: AVAudioSession?
    var audioRecorder: AVAudioRecorder?
    var delegate: AudioRecorderProtocol?

    class var sharedInstance: AudioHelper {

        struct Static {
            static var instance: AudioHelper?
            static var token: dispatch_once_t = 0
        }

        dispatch_once(&Static.token) {
            Static.instance = AudioHelper()
        }

        return Static.instance!
    }

    override init() {
        setupAudioSession()
        super.init()
    }

    func setDelegate(delegate: AudioRecorderProtocol) {
        self.delegate = delegate
    }

    func setupAudioSession() {
        audioSession = AVAudioSession.sharedInstance()
        audioSession?.setCategory(AVAudioSessionCategoryPlayAndRecord, error: nil)
        audioSession?.setActive(true, error: nil)
    }

    func createAudioMessageDirectory() {
        let fm = NSFileManager.defaultManager()
        if !fm.fileExistsAtPath(GlobalVars.kAudioMessageDirectory) {
            var error: NSError?
            if !fm.createDirectoryAtPath(GlobalVars.kAudioMessageDirectory, withIntermediateDirectories: true, attributes: nil, error: &error) {
                println("Unable to create audio message directory: \(error)")
            }
        }
    }

    // MARK: Recording

    func beginRecordingAudio() {
        createAudioMessageDirectory()
        var filepath =  GlobalVars.kAudioMessageDirectory.stringByAppendingPathComponent("audiofile.aac")
        var url = NSURL(fileURLWithPath: filepath)

        var recordSettings = [
            AVFormatIDKey: kAudioFormatMPEG4AAC,
            AVSampleRateKey: 8000.0,
            AVNumberOfChannelsKey: 1,
            AVEncoderBitRateKey: 12800,
            AVLinearPCMBitDepthKey: 16,
            AVEncoderAudioQualityKey: AVAudioQuality.Max.rawValue
        ]

        println("Recorded Audio Message Saved: \(url!)")

        var error: NSError?
        audioRecorder = AVAudioRecorder(URL: url, settings: recordSettings as [NSObject : AnyObject], error: &error)

        if error == nil {
            if audioRecorder != nil {
                audioRecorder!.delegate = self
                audioRecorder!.record()
            }
        }
        else {
            println(error!.localizedDescription)
        }
    }

    func stopRecordingAudio() {
        if audioRecorder != nil {
            audioRecorder!.stop()
        }
    }

    func handleRecordAudioButtonLongPressGestureForState(state: UIGestureRecognizerState) {
        if state == UIGestureRecognizerState.Ended {
            stopRecordingAudio()
            delegate?.onRecordAudioStop()
        }
        else if state == UIGestureRecognizerState.Began {
            beginRecordingAudio()
            delegate?.onRecordAudioStart()
        }
    }

    func audioRecorderDidFinishRecording(recorder: AVAudioRecorder!, successfully flag: Bool) {
        println("Record Audio Success: \(flag)")
        delegate?.onRecordAudioFinished()
    }

    func audioRecorderEncodeErrorDidOccur(recorder: AVAudioRecorder!, error: NSError!) {
        println("Record Audio Encode Error: \(error.localizedDescription)")
    }

    // MARK: Playback

    func playAudioMessageFromUrl(messageId: String) {
        if let url = NSURL(string: GlobalVars.kUrlAudioMessage + messageId) {
            if let data = NSData(contentsOfURL: url) {
                var error: NSError? = nil
                let audioPlayer = AVAudioPlayer(data: data, error: &error)

                if error == nil {
                    if audioPlayer != nil {
                        audioPlayer.numberOfLoops = 0
                        audioPlayer.volume = 1.0
                        audioPlayer.prepareToPlay()
                        audioPlayer.play()
                    }
                }
                else {
                    println("Audio playback error: \(error?.localizedDescription)")
                }
            }
        }
    }

}

protocol AudioRecorderProtocol {
    func onRecordAudioStart()
    func onRecordAudioStop()
    func onRecordAudioFinished()
}

My UIViewController that implements the protocol (extraneous code removed)

class ChatViewController: UIViewController, UITableViewDelegate, UITableViewDataSource, AudioRecorderProtocol {

    let audioHelper = AudioHelper.sharedInstance

    let appDelegate = UIApplication.sharedApplication().delegate as! AppDelegate

    override func viewDidLoad() {
        super.viewDidLoad()

//        addDemoMessages()

        setupGestureRecognizer()
        setupKeyboardObserver()
        setupViews()
        setupTableView()
        audioHelper.setDelegate(self)
    }

    override func viewWillAppear(animated: Bool) {
        super.viewWillAppear(animated)
        getUsersFromDb()
        getMessagesFromDb()
    }

    override func viewDidAppear(animated: Bool) {
        super.viewDidAppear(animated)
        setCurrentVC()
        tableView.reloadData()

        if partnerUserId != nil && !db.doesUserExist(partnerUserId!) {
            HttpPostHelper.profileGet(userId: partnerUserId!)
        }

        requestMessagesFromServer()
    }

    override func viewDidLayoutSubviews() {
        super.viewDidLayoutSubviews()
        ViewHelper.scrollTableViewToBottom(tableView)
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    func handleRecordAudioButtonHold(sender: UILongPressGestureRecognizer) {
        audioHelper.handleRecordAudioButtonLongPressGestureForState(sender.state)
    }

    func onRecordAudioStart() {
        dispatch_async(dispatch_get_main_queue(), {
            ViewHelper.showToast(NSLocalizedString("RECORDING", comment: ""))
            self.recordAudioButton.imageView!.image = UIImage(named: "RecordAudioClicked")
        })
    }

    func onRecordAudioStop() {
        dispatch_async(dispatch_get_main_queue(), {
            self.recordAudioButton.imageView!.image = UIImage(named: "RecordAudio")
        })
    }

    func onRecordAudioFinished() {
        HttpPostHelper.messageAudio(partnerUserId: partnerUserId)
    }

    func playAudioFromUrl(sender: UIButton) {
        let messageId = messages[sender.tag].id
        audioHelper.playAudioMessageFromUrl(messageId)
    }

}

Upvotes: 25

Views: 28408

Answers (2)

oskarko

Reputation: 4178

If you are working with the MVVM pattern and you really need to call some function before super.init, you can always move that function to the ViewModel and call it from there. Just pass the viewModel in as an injected dependency.
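
For example, a minimal sketch of that idea (AudioViewModel and its setupAudioSession are illustrative names, not from the question): because the setup call goes through the injected parameter rather than through self, it is allowed to run before super.init().

import Foundation
import AVFoundation

class AudioViewModel {
    // Owns the audio-session setup that previously ran inside AudioHelper's init
    func setupAudioSession() {
        let session = AVAudioSession.sharedInstance()
        session.setCategory(AVAudioSessionCategoryPlayAndRecord, error: nil)
        session.setActive(true, error: nil)
    }
}

class AudioHelper: NSObject {
    let viewModel: AudioViewModel

    init(viewModel: AudioViewModel) {
        self.viewModel = viewModel       // phase 1: initialize stored properties
        viewModel.setupAudioSession()    // call goes through the parameter, not through self
        super.init()
    }
}

Note that the singleton accessor in the question would then need to supply a view model when it calls AudioHelper().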

Upvotes: 0

Stefan Salatic

Reputation: 4513

Just place it under super.init().

The object needs to be initialized by the superclass first; only then can you do your custom initialization.

override init() {
    super.init()
    setupAudioSession()
}
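
For the second part of the question, here is a sketch of the same fix written back into the AudioHelper class from the question, with comments on why the order matters:

override init() {
    // All of AudioHelper's stored properties are optionals, so they default to nil
    // and there is nothing to set up before handing off to NSObject.
    super.init()
    // After super.init() returns, 'self' is fully initialized and its methods may be called.
    setupAudioSession()
}

As for why you have to override init() at all: AudioHelper subclasses NSObject, which already declares a parameterless init(), so defining your own init() with the same signature requires the override keyword. You only need to define the initializer because setupAudioSession() has to run when the instance is created; if you removed it, the inherited NSObject init() would be used and the audio session would never be configured.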

Upvotes: 58
