Reputation: 774
I am making an app that records video. Up until now I have been able to record video and audio successfully using AVCaptureMovieFileOutput; however, I now need to edit the video frames in real time to overlay some data onto the video, so I began switching to AVAssetWriter.

After the switch, I am able to record video (with my overlays) just fine using AVCaptureVideoDataOutput; however, AVCaptureAudioDataOutput never calls the delegate method, so my audio doesn't record.
This is how I set up my AVCaptureSession:
fileprivate func setupCamera() {

    //Set queues
    queue = DispatchQueue(label: "myqueue", qos: .utility, attributes: .concurrent, autoreleaseFrequency: DispatchQueue.AutoreleaseFrequency.inherit, target: DispatchQueue.global())

    //The size of output video will be 720x1280
    print("Established AVCaptureSession")
    cameraSession.sessionPreset = AVCaptureSession.Preset.hd1280x720

    //Setup your camera
    //Detect which type of camera should be used via `isUsingFrontFacingCamera`
    let videoDevice: AVCaptureDevice
    videoDevice = AVCaptureDevice.default(AVCaptureDevice.DeviceType.builtInWideAngleCamera, for: AVMediaType.video, position: AVCaptureDevice.Position.front)!
    print("Created AVCaptureDeviceInput: video")

    //Setup your microphone
    var audioDevice: AVCaptureDevice
    //audioDevice = AVCaptureDevice.default(for: AVMediaType.audio)!
    audioDevice = AVCaptureDevice.default(AVCaptureDevice.DeviceType.builtInMicrophone, for: AVMediaType.audio, position: AVCaptureDevice.Position.unspecified)!
    print("Created AVCaptureDeviceInput: audio")

    do {
        cameraSession.beginConfiguration()
        cameraSession.automaticallyConfiguresApplicationAudioSession = false
        cameraSession.usesApplicationAudioSession = true

        // Add camera to your session
        let videoInput = try AVCaptureDeviceInput(device: videoDevice)
        if cameraSession.canAddInput(videoInput) {
            cameraSession.addInput(videoInput)
            print("Added AVCaptureDeviceInput: video")
        } else {
            print("Could not add VIDEO!!!")
        }

        // Add microphone to your session
        let audioInput = try AVCaptureDeviceInput(device: audioDevice)
        if cameraSession.canAddInput(audioInput) {
            cameraSession.addInput(audioInput)
            print("Added AVCaptureDeviceInput: audio")
        } else {
            print("Could not add MIC!!!")
        }

        //Define your video output
        videoDataOutput.videoSettings = [
            kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA,
        ]
        videoDataOutput.alwaysDiscardsLateVideoFrames = true
        if cameraSession.canAddOutput(videoDataOutput) {
            videoDataOutput.setSampleBufferDelegate(self, queue: queue)
            cameraSession.addOutput(videoDataOutput)
            print("Added AVCaptureDataOutput: video")
        }

        //Define your audio output
        if cameraSession.canAddOutput(audioDataOutput) {
            audioDataOutput.setSampleBufferDelegate(self, queue: queue)
            cameraSession.addOutput(audioDataOutput)
            print("Added AVCaptureDataOutput: audio")
        }

        //Set up the AVAssetWriter (to write to file)
        do {
            videoWriter = try AVAssetWriter(outputURL: getURL()!, fileType: AVFileType.mp4)
            print("Setup AVAssetWriter")

            //Video Settings
            let videoSettings: [String : Any] = [
                AVVideoCodecKey : AVVideoCodecType.h264,
                AVVideoWidthKey : 720,
                AVVideoHeightKey : 1280,
            ]
            videoWriterVideoInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: videoSettings)
            videoWriterVideoInput?.expectsMediaDataInRealTime = true
            print("Setup AVAssetWriterInput: Video")
            if (videoWriter?.canAdd(videoWriterVideoInput!))! {
                videoWriter?.add(videoWriterVideoInput!)
                print("Added AVAssetWriterInput: Video")
            } else {
                print("Could not add VideoWriterInput to VideoWriter")
            }

            //Audio Settings
            let audioSettings: [String : Any] = [
                AVFormatIDKey : kAudioFormatMPEG4AAC,
                AVSampleRateKey : 44100,
                AVEncoderBitRateKey : 64000,
                AVNumberOfChannelsKey: 1
            ]
            videoWriterAudioInput = AVAssetWriterInput(mediaType: AVMediaType.audio, outputSettings: audioSettings)
            videoWriterAudioInput?.expectsMediaDataInRealTime = true
            print("Setup AVAssetWriterInput: Audio")
            if (videoWriter?.canAdd(videoWriterAudioInput!))! {
                videoWriter?.add(videoWriterAudioInput!)
                print("Added AVAssetWriterInput: Audio")
            } else {
                print("Could not add AudioWriterInput to VideoWriter")
            }
        }
        catch {
            print("ERROR")
            return
        }

        //PixelWriter
        videoWriterInputPixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriterVideoInput!, sourcePixelBufferAttributes: [
            kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA,
            kCVPixelBufferWidthKey as String: 1280,
            kCVPixelBufferHeightKey as String: 768,
            kCVPixelFormatOpenGLESCompatibility as String: true,
        ])
        print("Created AVAssetWriterInputPixelBufferAdaptor")

        //Present the preview of video
        previewLayer = AVCaptureVideoPreviewLayer(session: cameraSession)
        previewLayer.position = CGPoint.init(x: CGFloat(self.view.frame.width/2), y: CGFloat(self.view.frame.height/2))
        previewLayer.bounds = self.view.bounds
        previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
        cameraView.layer.addSublayer(previewLayer)
        print("Created AVCaptureVideoPreviewLayer")

        //Don't forget to start running your session
        //this doesn't mean start recording!
        cameraSession.commitConfiguration()
        cameraSession.startRunning()
    }
    catch let error {
        debugPrint(error.localizedDescription)
    }
}
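Incidentally, a quick sanity check (a hypothetical diagnostic, not part of the original setup) after commitConfiguration() can confirm that the audio data output actually received an active connection:

    // Hypothetical diagnostic, run after cameraSession.commitConfiguration():
    if let audioConnection = audioDataOutput.connection(with: .audio) {
        print("Audio connection active: \(audioConnection.isActive)")
    } else {
        print("audioDataOutput has no audio connection")
    }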
Start recording:
func startRecording() {
    print("Begin Recording...")
    let recordingClock = self.cameraSession.masterClock
    isRecording = true
    videoWriter?.startWriting()
    videoWriter?.startSession(atSourceTime: CMClockGetTime(recordingClock!))
}
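As an aside, an alternative to starting the writer session from the capture session's clock is to defer startSession(atSourceTime:) until the first sample buffer reaches the delegate, so the file's timeline begins exactly at the first sample. A minimal sketch, assuming a hypothetical writerSessionStarted flag alongside the existing videoWriter and isRecording properties:

    var writerSessionStarted = false  // hypothetical flag, not in the original code

    func startWriterSessionIfNeeded(with sampleBuffer: CMSampleBuffer) {
        guard isRecording, !writerSessionStarted, let writer = videoWriter else { return }
        // Start the writer's timeline at the first buffer's presentation time.
        writer.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
        writerSessionStarted = true
    }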
Stop recording:
func stopRecording() {
    if (videoWriter?.status.rawValue == 1) {
        videoWriterVideoInput?.markAsFinished()
        videoWriterAudioInput?.markAsFinished()
        print("video finished")
        print("audio finished")
    } else {
        print("not writing")
    }

    self.videoWriter?.finishWriting() {
        self.isRecording = false
        print("finished writing")
        DispatchQueue.main.async {
            if self.videoWriter?.status == AVAssetWriterStatus.failed {
                print("status: failed")
            } else if self.videoWriter?.status == AVAssetWriterStatus.completed {
                print("status: completed")
            } else if self.videoWriter?.status == AVAssetWriterStatus.cancelled {
                print("status: cancelled")
            } else {
                print("status: unknown")
            }
            if let e = self.videoWriter?.error {
                print("stop record error:", e)
            }
        }
    }
    print("Stop Recording!")
}
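(Minor aside: the status.rawValue == 1 check works because AVAssetWriterStatus.writing has raw value 1; comparing against the case directly reads more clearly.)

    // Equivalent to the rawValue == 1 check above, but more readable:
    if videoWriter?.status == AVAssetWriterStatus.writing {
        videoWriterVideoInput?.markAsFinished()
        videoWriterAudioInput?.markAsFinished()
    }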
And this is the delegate method, which gets called for video, but not for audio:
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    autoreleasepool {
        guard CMSampleBufferDataIsReady(sampleBuffer) else {
            return
        }

        if (connection.isVideoOrientationSupported) {
            connection.videoOrientation = currentVideoOrientation()
        } else {
            return
        }

        if (connection.isVideoStabilizationSupported) {
            //connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.auto
        }

        if !self.isRecording {
            return
        }

        var string = ""
        if let audio = self.videoWriterAudioInput {
            if connection.audioChannels.count > 0 {
                //EXECUTION NEVER REACHES HERE
                if audio.isReadyForMoreMediaData {
                    queue!.async() {
                        audio.append(sampleBuffer)
                    }
                    return
                }
            }
        }
        print("\(string)")

        if let camera = self.videoWriterVideoInput, camera.isReadyForMoreMediaData {
            //This is getting called!!!
            queue!.async() {
                self.videoWriterInputPixelBufferAdaptor.append(self.imageToBuffer(from: image!)!, withPresentationTime: timestamp)
            }
        }
    } //End autoreleasepool
}
I am sure the problem does not lie with my devices or inputs, as I was able to successfully record video and audio using AVCaptureMovieFileOutput. I have also read other relevant posts with no luck:
Corrupt video capturing audio and video using AVAssetWriter
AVAssetWriter audio with video together
Upvotes: 2
Views: 3976
Reputation: 774
Ripped my hair out for days on this. My mistake was simple: the delegate method was being called, but it was returning BEFORE it reached the audio statements. These were the culprits, which needed to be moved after the audio-processing portion of my code:
if (connection.isVideoOrientationSupported) {
    connection.videoOrientation = currentVideoOrientation()
} else {
    return
}

if (connection.isVideoStabilizationSupported) {
    //connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.auto
}
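For reference, here is a minimal sketch of the reordered delegate (using the question's property names; the video branch is simplified to a plain append rather than the pixel-buffer-adaptor overlay). The audio buffer is appended before any video-only checks, so the orientation branch can no longer return early and starve the audio input:

    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        guard CMSampleBufferDataIsReady(sampleBuffer), isRecording else { return }

        // Audio is handled first, so nothing below can return before it is appended.
        if output == audioDataOutput {
            if let audio = videoWriterAudioInput, audio.isReadyForMoreMediaData {
                audio.append(sampleBuffer)
            }
            return
        }

        // Video-only checks live after the audio path; returning here no longer affects audio.
        guard connection.isVideoOrientationSupported else { return }
        connection.videoOrientation = currentVideoOrientation()

        if let video = videoWriterVideoInput, video.isReadyForMoreMediaData {
            // The original code overlays data via the pixel buffer adaptor here;
            // this sketch appends the raw buffer for brevity.
            video.append(sampleBuffer)
        }
    }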
Upvotes: 1