Reputation: 4973
When I record video using the following view controller:
class AVCameraViewController: UIViewController, AVCaptureFileOutputRecordingDelegate {
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
initializeMotionManager()
sessionQueue.async {
let movieFileOutput = AVCaptureMovieFileOutput()
if self.session.canAddOutput(movieFileOutput) {
self.session.beginConfiguration()
self.session.addOutput(movieFileOutput)
self.session.sessionPreset = .high
if let connection = movieFileOutput.connection(with: .video) {
if connection.isVideoStabilizationSupported {
connection.preferredVideoStabilizationMode = .auto
}
}
self.session.commitConfiguration()
movieFileOutput.maxRecordedDuration = CMTime(seconds: 120, preferredTimescale: 60)
self.movieFileOutput = movieFileOutput
DispatchQueue.main.async {
self.recordButton.isEnabled = true
}
}
}
}
func fileOutput(_ output: AVCaptureFileOutput,
didFinishRecordingTo outputFileURL: URL,
from connections: [AVCaptureConnection],
error: Error?) {
// Note: Since we use a unique file path for each recording, a new recording won't overwrite a recording mid-
save.
UIApplication.shared.isIdleTimerDisabled = false
func cleanup() {
let path = outputFileURL.path
if FileManager.default.fileExists(atPath: path) {
do {
try FileManager.default.removeItem(atPath: path)
} catch {
print("Could not remove file at url: \(outputFileURL)")
}
}
if let currentBackgroundRecordingID = backgroundRecordingID {
backgroundRecordingID = UIBackgroundTaskIdentifier.invalid
if currentBackgroundRecordingID != UIBackgroundTaskIdentifier.invalid {
UIApplication.shared.endBackgroundTask(currentBackgroundRecordingID)
}
}
}
var success = true
if error != nil {
print("Movie file finishing error: \(String(describing: error))")
success = (((error! as NSError).userInfo[AVErrorRecordingSuccessfullyFinishedKey] as AnyObject).boolValue)!
}
if success {
// Check authorization status.
UIView.animate(withDuration: 0.5){
self.overlay.alpha = 0.9
self.navigationController?.navigationBar.isTranslucent = false
}
footageURL = outputFileURL
performSegue(withIdentifier: "TrimFootage", sender: nil)
} else {
cleanup()
}
// Enable the Camera and Record buttons to let the user switch camera and start another recording.
DispatchQueue.main.async {
// Only enable the ability to change camera if the device has more than one camera.
self.recordButton.isEnabled = true
// self.recordButton.setImage(#imageLiteral(resourceName: "CaptureVideo"), for: [])
}
}
}
As you can see I am setting the maxRecordedDuration to 2 minutes. When its done recording successfully, it eventually segues to another view controller.
The problem is right now it only records for a minute and then stops recording and segues. Im not sure if Im not setting the maxRecordedDuration correctly or if I have to be doing something else instead.
Upvotes: 4
Views: 1112
Reputation: 610
If you cannot sort it out by maxRecordedDuration, may I suggest to try and remove that and set a timer instead. It would trigger when recording starts and run for 120 secs. If you press stop before, just invalidate it so it doesn't trigger. If timer runs to end, just call stopRecording function that will stop recording and trigger segue you want.
Would that work out?
private var timer: Timer?
private func startRecording() {
// Code to start recording, you can start timer here once you start recording
self.timer = Timer.scheduledTimer(withTimeInterval: 120, repeats: false, block: { [weak self] (t) in
guard let welf = self else {return}
welf.stopRecording()
})
}
private func stopRecording() {
// Code to stop recording and segue further
}
Upvotes: 0
Reputation: 2073
I just used this code: (Most of it comes from this accepted answer. Thanks @gwinyai. You should upvote his answer. I already did. ;)
import UIKit
import AVFoundation
class ViewController: UIViewController, AVCaptureFileOutputRecordingDelegate {
@IBOutlet weak var camPreview: UIView!
let cameraButton = UIView()
let captureSession = AVCaptureSession()
let movieOutput = AVCaptureMovieFileOutput()
var previewLayer: AVCaptureVideoPreviewLayer!
var activeInput: AVCaptureDeviceInput!
var outputURL: URL!
override func viewDidLoad() {
super.viewDidLoad()
movieOutput.maxRecordedDuration = CMTime(seconds: 120, preferredTimescale: 600)
if setupSession() {
setupPreview()
startSession()
}
cameraButton.isUserInteractionEnabled = true
let cameraButtonRecognizer = UITapGestureRecognizer(target: self, action: #selector(ViewController.startCapture))
cameraButton.addGestureRecognizer(cameraButtonRecognizer)
cameraButton.frame = CGRect(x: 0, y: 0, width: 100, height: 100)
cameraButton.backgroundColor = UIColor.red
camPreview.addSubview(cameraButton)
}
func setupPreview() {
// Configure previewLayer
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer.frame = camPreview.bounds
previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
camPreview.layer.addSublayer(previewLayer)
}
//MARK:- Setup Camera
func setupSession() -> Bool {
captureSession.sessionPreset = AVCaptureSession.Preset.high
// Setup Camera
let camera = AVCaptureDevice.default(for: AVMediaType.video)!
do {
let input = try AVCaptureDeviceInput(device: camera)
if captureSession.canAddInput(input) {
captureSession.addInput(input)
activeInput = input
}
} catch {
print("Error setting device video input: \(error)")
return false
}
// Setup Microphone
let microphone = AVCaptureDevice.default(for: AVMediaType.audio)!
do {
let micInput = try AVCaptureDeviceInput(device: microphone)
if captureSession.canAddInput(micInput) {
captureSession.addInput(micInput)
}
} catch {
print("Error setting device audio input: \(error)")
return false
}
// Movie output
if captureSession.canAddOutput(movieOutput) {
captureSession.addOutput(movieOutput)
}
return true
}
func setupCaptureMode(_ mode: Int) {
// Video Mode
}
//MARK:- Camera Session
func startSession() {
if !captureSession.isRunning {
videoQueue().async {
self.captureSession.startRunning()
}
}
}
func stopSession() {
if captureSession.isRunning {
videoQueue().async {
self.captureSession.stopRunning()
}
}
}
func videoQueue() -> DispatchQueue {
return DispatchQueue.main
}
func currentVideoOrientation() -> AVCaptureVideoOrientation {
var orientation: AVCaptureVideoOrientation
switch UIDevice.current.orientation {
case .portrait:
orientation = AVCaptureVideoOrientation.portrait
case .landscapeRight:
orientation = AVCaptureVideoOrientation.landscapeLeft
case .portraitUpsideDown:
orientation = AVCaptureVideoOrientation.portraitUpsideDown
default:
orientation = AVCaptureVideoOrientation.landscapeRight
}
return orientation
}
@objc func startCapture() {
if movieOutput.isRecording == false {
startRecording()
print("-------- startRecording --------")
} else {
stopRecording()
print("-------- stopRecording --------")
}
}
func tempURL() -> URL? {
let directory = NSTemporaryDirectory() as NSString
if directory != "" {
let path = directory.appendingPathComponent(NSUUID().uuidString + ".mp4")
return URL(fileURLWithPath: path)
}
return nil
}
func startRecording() {
if movieOutput.isRecording == false {
let connection = movieOutput.connection(with: AVMediaType.video)
if (connection?.isVideoOrientationSupported)! {
connection?.videoOrientation = currentVideoOrientation()
}
if (connection?.isVideoStabilizationSupported)! {
connection?.preferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.auto
}
let device = activeInput.device
if (device.isSmoothAutoFocusSupported) {
do {
try device.lockForConfiguration()
device.isSmoothAutoFocusEnabled = false
device.unlockForConfiguration()
} catch {
print("Error setting configuration: \(error)")
}
}
outputURL = tempURL()
movieOutput.startRecording(to: outputURL, recordingDelegate: self)
}
else {
stopRecording()
}
}
func stopRecording() {
if movieOutput.isRecording == true {
movieOutput.stopRecording()
}
}
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
if let error = error,
let nserror = error as NSError? {
switch nserror.code {
case AVError.Code.maximumDurationReached.rawValue:
//no error because we want to stop if max duration is reached
print(output.maxRecordedDuration.seconds, "<<<<<<<<<<<<" )
recordingEnded()
return
default:
//handle error
print(nserror.userInfo)
break
}
} else {
//user manually stopped the video before maxduration was reached
recordingEnded()
}
}
func recordingEnded() -> Void {
print("recording ended successfully")
let videoRecorded = outputURL! as URL
}
}
to stop recording after 120 seconds.
It works! You just need to add the camPreview
in Storyboard and make sure you have Privacy - Microphone Usage Description
and Privacy - Camera Usage Description
added to your .plist
.
Why movieOutput.maxRecordedDuration = CMTime(seconds: 120, preferredTimescale: 600)
Apple recommends a timescale of 600 for video, with the explanation that 600 is a multiple of the common video framerates (24, 25, and 30 FPS). You might want to crank this up to 60,000 or higher if you need sample-exact indexing on audio files. ....
Check here.
Upvotes: 1
Reputation: 1264
I'm assuming self.session refers to an instance of AVCaptureSession. Try moving the maxRecordedDefinition
property definition right after the definition of movieFileOutput
let movieFileOutput = AVCaptureMovieFileOutput()
movieFileOutput.maxRecordedDuration = CMTime(seconds: 120, preferredTimescale: 1)
You may be prematurely committing the session configuration with incorrect settings.
(Also, use a preferredTimescale of 1, representing one whole second - but I think you already noted that in a comment above.)
Upvotes: 0