Bart van Kuik

Reputation: 4852

AVCaptureConnection.previewLayer is nil in delegate AVCaptureMetadataOutputObjectsDelegate

The code below can run on an iPhone and when aimed at any QR code, should print the contents. The QR code below points to example.com.

[Image: a QR code that points to example.com]

The problem is that the delegate callback receives its connection: AVCaptureConnection argument, as it should, but the connection's videoPreviewLayer property is nil.

The following code can be pasted into a new, empty Xcode project. If you comment out the setMetadataObjectsDelegate(delegate, ...) call and enable the commented-out setMetadataObjectsDelegate(self, ...) call above it, it works fine. But I want to place the delegate outside the CaptureView class. How do I set up the capture session so that the connection handed to the AVCaptureMetadataOutputObjectsDelegate has a non-nil videoPreviewLayer?

import UIKit
import AVFoundation

func printMetadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], previewLayer: AVCaptureVideoPreviewLayer) {
    for object in metadataObjects {
        let visualCodeObject = previewLayer.transformedMetadataObject(for: object)
        guard let object = visualCodeObject, let barcode = object as? AVMetadataMachineReadableCodeObject else {
            NSLog("Ignoring object that is not AVMetadataMachineReadableCodeObject")
            continue
        }
        
        guard let barcodeString = barcode.stringValue else {
            NSLog("Captured something that's not a string")
            continue
        }

        NSLog("Captured string %@", barcodeString)
    }
}

class CaptureView: UIView, AVCaptureMetadataOutputObjectsDelegate {
    private let previewLayer = AVCaptureVideoPreviewLayer()
    
    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        printMetadataOutput(output, didOutput: metadataObjects, previewLayer: self.previewLayer)
    }
    
    override func layoutSubviews() {
        super.layoutSubviews()
        self.previewLayer.frame = self.bounds // bounds, not frame: the layer is positioned in this view's own coordinate space
    }
    
    init(frame: CGRect, delegate: AVCaptureMetadataOutputObjectsDelegate) {
        guard let captureDevice = AVCaptureDevice.default(for: .video) else {
            fatalError("Couldn't find default capture device")
        }
        
        guard let captureDeviceInput = try? AVCaptureDeviceInput(device: captureDevice) else {
            super.init(frame: frame)
            return
        }
        
        let captureSession = AVCaptureSession()
        captureSession.addInput(captureDeviceInput)
        
        self.previewLayer.session = captureSession
        self.previewLayer.videoGravity = .resizeAspectFill
        
        super.init(frame: frame)
        
        self.layer.addSublayer(self.previewLayer)
        self.previewLayer.frame = self.bounds
        captureSession.startRunning()
        
        let metadataOutput = AVCaptureMetadataOutput()
//        metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
        metadataOutput.setMetadataObjectsDelegate(delegate, queue: DispatchQueue.main)
        metadataOutput.rectOfInterest = CGRect(x: 0, y: 0, width: 1, height: 1)
        
        if captureSession.canAddOutput(metadataOutput) {
            captureSession.addOutput(metadataOutput)
        } else {
            fatalError("Can't add metadata output to capture session")
        }
        
        metadataOutput.metadataObjectTypes = [.qr]
    }
    
    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
}

class MetadataDelegate: NSObject, AVCaptureMetadataOutputObjectsDelegate {
    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        guard let previewLayer = connection.videoPreviewLayer else {
            print("previewLayer was nil")
            return
        }
        printMetadataOutput(output, didOutput: metadataObjects, previewLayer: previewLayer)
    }
}

class ViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
    private let metadataDelegate = MetadataDelegate()
    
    override func viewDidLoad() {
        super.viewDidLoad()
        let captureView = CaptureView(frame: CGRect(), delegate: self.metadataDelegate)
        captureView.frame = self.view.frame
        captureView.autoresizingMask = [.flexibleHeight, .flexibleWidth]
        self.view.addSubview(captureView)
    }
}

Upvotes: 2

Views: 753

Answers (2)

gcharita

Reputation: 8327

As the videoPreviewLayer documentation states:

This property is set if you initialized the connection using init(inputPort:videoPreviewLayer:) or connectionWithInputPort:videoPreviewLayer:.

So, to get a value in the videoPreviewLayer property, you have to set up the AVCaptureConnection object manually.
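
For illustration, here is a minimal, untested sketch of what that manual wiring could look like, built on the captureDeviceInput and previewLayer from the question's code and using the session's no-connection primitives. Note that, as the update below explains, the connection handed to the metadata delegate is the camera-to-output one, so even this wiring would not make videoPreviewLayer non-nil inside the callback:

let captureSession = AVCaptureSession()
captureSession.addInputWithNoConnections(captureDeviceInput)

if let videoPort = captureDeviceInput.ports.first(where: { $0.mediaType == .video }) {
    // A connection created with init(inputPort:videoPreviewLayer:) carries
    // the preview layer, so its videoPreviewLayer property is non-nil.
    let previewConnection = AVCaptureConnection(inputPort: videoPort, videoPreviewLayer: previewLayer)
    if captureSession.canAddConnection(previewConnection) {
        captureSession.addConnection(previewConnection)
    }
    // The metadata output would likewise need addOutputWithNoConnections(_:)
    // and AVCaptureConnection(inputPorts:output:) to be wired up by hand.
}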

Instead, I would suggest hiding AVCaptureMetadataOutputObjectsDelegate behind a custom protocol of your own:

protocol CaptureViewMetadataOutputObjectsDelegate {
    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection, previewLayer: AVCaptureVideoPreviewLayer)
}

Then implement AVCaptureMetadataOutputObjectsDelegate in your CaptureView and call your protocol's function, passing along the required AVCaptureVideoPreviewLayer. Your code will look like this:

class CaptureView: UIView, AVCaptureMetadataOutputObjectsDelegate {
    private let previewLayer = AVCaptureVideoPreviewLayer()
    private let delegate: CaptureViewMetadataOutputObjectsDelegate
    
    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        delegate.metadataOutput(output, didOutput: metadataObjects, from: connection, previewLayer: previewLayer)
//        printMetadataOutput(output, didOutput: metadataObjects, previewLayer: self.previewLayer)
    }
    
    override func layoutSubviews() {
        super.layoutSubviews()
        self.previewLayer.frame = self.bounds
    }
    
    init(frame: CGRect, delegate: CaptureViewMetadataOutputObjectsDelegate) {
        self.delegate = delegate
        guard let captureDevice = AVCaptureDevice.default(for: .video) else {
            fatalError("Couldn't find default capture device")
        }
        
        guard let captureDeviceInput = try? AVCaptureDeviceInput(device: captureDevice) else {
            super.init(frame: frame)
            return
        }
        
        let captureSession = AVCaptureSession()
        captureSession.addInput(captureDeviceInput)
        
        self.previewLayer.session = captureSession
        self.previewLayer.videoGravity = .resizeAspectFill
        
        super.init(frame: frame)
        
        self.layer.addSublayer(self.previewLayer)
        self.previewLayer.frame = self.bounds
        captureSession.startRunning()
        
        let metadataOutput = AVCaptureMetadataOutput()
        metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
//        metadataOutput.setMetadataObjectsDelegate(delegate, queue: DispatchQueue.main)
        metadataOutput.rectOfInterest = CGRect(x: 0, y: 0, width: 1, height: 1)
        
        if captureSession.canAddOutput(metadataOutput) {
            captureSession.addOutput(metadataOutput)
        } else {
            fatalError("Can't add metadata output to capture session")
        }
        
        metadataOutput.metadataObjectTypes = [.qr]
    }
    
    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
}

class MetadataDelegate: CaptureViewMetadataOutputObjectsDelegate {
    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection, previewLayer: AVCaptureVideoPreviewLayer) {
        printMetadataOutput(output, didOutput: metadataObjects, previewLayer: previewLayer)
    }
}

class ViewController: UIViewController {
    private let metadataDelegate = MetadataDelegate()
    
    override func viewDidLoad() {
        super.viewDidLoad()
        let captureView = CaptureView(frame: CGRect(), delegate: self.metadataDelegate)
        captureView.frame = self.view.frame
        captureView.autoresizingMask = [.flexibleHeight, .flexibleWidth]
        self.view.addSubview(captureView)
    }
}

Update: After some research, I found this statement in the init(inputPort:videoPreviewLayer:) documentation:

When using addInput(_:) or addOutput(_:), connections are automatically formed between all compatible inputs and outputs. You do not need to manually create and add connections to the session unless you use the primitive addInputWithNoConnections(_:) and addOutputWithNoConnections(_:) methods.

That means that when you add the device camera as input and the AVCaptureMetadataOutput as output, all compatible AVCaptureConnection objects are created automatically.

I tried to create another AVCaptureConnection using the following code:

if let port = captureDeviceInput.ports.first(where: { $0.mediaType == .video }) {
    let con = AVCaptureConnection(inputPort: port, videoPreviewLayer: self.previewLayer)
    if captureSession.canAddConnection(con) {
        captureSession.addConnection(con)
    }
}

but the canAddConnection(_:) function always returns false.

After that I printed the connections array of the AVCaptureSession and saw the following:

(lldb) po captureSession.connections
[<AVCaptureConnection: 0x280d67980 (AVCaptureDeviceInput: 0x280d119a0 Back Camera) -> (AVCaptureVideoPreviewLayer: 0x280d6ba40) [type:vide][enabled:1][active:1]>, <AVCaptureConnection: 0x280d7bee0 (AVCaptureDeviceInput: 0x280d119a0 Back Camera) -> (AVCaptureMetadataOutput: 0x280d700e0) [type:mobj][enabled:1][active:1]>]

So, one AVCaptureConnection has been created with the Back Camera as input and an AVCaptureVideoPreviewLayer instance (presumably the one you created, the previewLayer property), and another one with the Back Camera as input and the AVCaptureMetadataOutput that you added to the AVCaptureSession.

The first one, as expected, does have a value in its videoPreviewLayer property:

(lldb) po captureSession.connections[0].videoPreviewLayer
▿ Optional<AVCaptureVideoPreviewLayer>
  - some : <AVCaptureVideoPreviewLayer:0x280d6ba40; position = CGPoint (0 0); bounds = CGRect (0 0; 0 0); sublayers = (<CALayer: 0x280d6bc20>); masksToBounds = YES; allowsGroupOpacity = YES; inheritsTiming = NO; >

Apparently, the AVCaptureConnection instance you get in the metadataOutput(_:didOutput:from:) function will always be the second one: the one associating the Back Camera with the AVCaptureMetadataOutput.
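
To confirm this from inside the delegate callback, a small diagnostic sketch (not part of the original code) can check the connection's endpoints:

func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
    // The delivered connection terminates at the metadata output itself...
    print(connection.output === output)        // prints true
    // ...and an input-to-output connection never carries a preview layer.
    print(connection.videoPreviewLayer == nil) // prints true
}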

Upvotes: 2

wzso

Reputation: 3885

I caught the bug.

In fact, even when you enable this line:

metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)

in the corresponding metadataOutput(_:didOutput:from:) of CaptureView, connection.videoPreviewLayer is still nil. As the Developer Documentation says:

This property is set if you initialized the connection using init(inputPort:videoPreviewLayer:) or connectionWithInputPort:videoPreviewLayer:.

So, either way, connection.videoPreviewLayer will be nil.

I've updated your code a little to make it work the way you want.

import UIKit
import AVFoundation

func printMetadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], previewLayer: AVCaptureVideoPreviewLayer) {
    for object in metadataObjects {
        let visualCodeObject = previewLayer.transformedMetadataObject(for: object)
        guard let object = visualCodeObject, let barcode = object as? AVMetadataMachineReadableCodeObject else {
            NSLog("Ignoring object that is not AVMetadataMachineReadableCodeObject")
            continue
        }
        
        guard let barcodeString = barcode.stringValue else {
            NSLog("Captured something that's not a string")
            continue
        }

        NSLog("Captured string %@", barcodeString)
    }
}

class CaptureView: UIView, AVCaptureMetadataOutputObjectsDelegate {
    let previewLayer = AVCaptureVideoPreviewLayer()
    
    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        if connection.videoPreviewLayer == nil {
            print("connection.videoPreviewLayer was nil")
        }
        printMetadataOutput(output, didOutput: metadataObjects, previewLayer: self.previewLayer)
    }
    
    override func layoutSubviews() {
        super.layoutSubviews()
        self.previewLayer.frame = self.bounds
    }
    
    init(frame: CGRect, delegate: AVCaptureMetadataOutputObjectsDelegate) {
        guard let captureDevice = AVCaptureDevice.default(for: .video) else {
            fatalError("Couldn't find default capture device")
        }
        
        guard let captureDeviceInput = try? AVCaptureDeviceInput(device: captureDevice) else {
            super.init(frame: frame)
            return
        }
        
        let captureSession = AVCaptureSession()
        captureSession.addInput(captureDeviceInput)
        
        self.previewLayer.session = captureSession
        self.previewLayer.videoGravity = .resizeAspectFill
        
        super.init(frame: frame)
        
        self.layer.addSublayer(self.previewLayer)
        self.previewLayer.frame = self.bounds
        captureSession.startRunning()
        let metadataOutput = AVCaptureMetadataOutput()
//        metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
        metadataOutput.setMetadataObjectsDelegate(delegate, queue: DispatchQueue.main)
        metadataOutput.rectOfInterest = CGRect(x: 0, y: 0, width: 1, height: 1)
        
        if captureSession.canAddOutput(metadataOutput) {
            captureSession.addOutput(metadataOutput)
        } else {
            fatalError("Can't add metadata output to capture session")
        }
        
        metadataOutput.metadataObjectTypes = [.qr]
    }
    
    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
}

class MetadataDelegate: NSObject, AVCaptureMetadataOutputObjectsDelegate {
    var previewLayer: AVCaptureVideoPreviewLayer?
    
    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        guard let previewLayer = previewLayer else {
            print("previewLayer was nil")
            return
        }

        printMetadataOutput(output, didOutput: metadataObjects, previewLayer: previewLayer)
    }
}

class ViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
    private let metadataDelegate = MetadataDelegate()
    
    override func viewDidLoad() {
        super.viewDidLoad()
        let captureView = CaptureView(frame: CGRect(), delegate: self.metadataDelegate)
        metadataDelegate.previewLayer = captureView.previewLayer
        captureView.frame = self.view.frame
        captureView.autoresizingMask = [.flexibleHeight, .flexibleWidth]
        self.view.addSubview(captureView)
    }
}
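
One caveat with this approach: previewLayer must be assigned before the first delegate callback fires. That holds here because the callbacks are delivered on the main queue, which is still executing viewDidLoad when the session starts running. You might also declare the property as weak var so the MetadataDelegate cannot keep the layer alive after the CaptureView is gone.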

Upvotes: 0
