john smith
john smith

Reputation: 85

Combining drawing animation with live video using Swift and Pencilkit

I'm trying to create the ability for users to draw on the screen while recording a live video on the iphone. To do this, I'm enabling pencilkit for the drawing part and AVAnimation for the live recording. I'm recording each one separately and then merging the two into one video file into Firestore. My code compiles but only the live recording is saved. The drawing is not merged and I'm not sure why because no error is thrown. Code below.

class Test1: UIViewController {

var canvasView: PKCanvasView!
var videoPreviewLayer: AVCaptureVideoPreviewLayer!
var captureSession: AVCaptureSession!
var videoOutput: AVCaptureMovieFileOutput!
var isRecording = false
var outputFileURL: URL?
var frameRate: Double = 30 // Adjust frame rate as needed
var animationDuration: TimeInterval = 5 // Adjust animation duration as needed
var canvasSnapshotURLs = [URL]()

override func viewDidLoad() {
    super.viewDidLoad()

    // Set up video capture session
    captureSession = AVCaptureSession()

    guard let backCamera = AVCaptureDevice.default(for: .video),
          let input = try? AVCaptureDeviceInput(device: backCamera) else {
        fatalError("Unable to access camera")
    }

    captureSession.addInput(input)

    videoOutput = AVCaptureMovieFileOutput()
    captureSession.addOutput(videoOutput)

    videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    videoPreviewLayer.videoGravity = .resizeAspectFill
    view.layer.addSublayer(videoPreviewLayer)

    // Start the capture session
    captureSession.startRunning()

    // Create canvas view
    canvasView = PKCanvasView(frame: view.bounds)
    canvasView.delegate = self
    canvasView.backgroundColor = .clear
    canvasView.tool = PKInkingTool(.pen, color: .black, width: 10)
    canvasView.drawingPolicy = .anyInput
    canvasView.translatesAutoresizingMaskIntoConstraints = false
    view.addSubview(canvasView)

    // Add constraints
    NSLayoutConstraint.activate([
        canvasView.topAnchor.constraint(equalTo: view.topAnchor),
        canvasView.bottomAnchor.constraint(equalTo: view.bottomAnchor),
        canvasView.leadingAnchor.constraint(equalTo: view.leadingAnchor),
        canvasView.trailingAnchor.constraint(equalTo: view.trailingAnchor)
    ])

    // If you want to allow drawing with finger
    canvasView.allowsFingerDrawing = true

    // Add clear button
    let clearButton = UIButton(type: .system)
    clearButton.setTitle("Clear", for: .normal)
    clearButton.addTarget(self, action: #selector(clearCanvas), for: .touchUpInside)
    clearButton.translatesAutoresizingMaskIntoConstraints = false
    view.addSubview(clearButton)

    // Add constraints for clear button
    NSLayoutConstraint.activate([
        clearButton.centerXAnchor.constraint(equalTo: view.centerXAnchor),
        clearButton.bottomAnchor.constraint(equalTo: view.bottomAnchor, constant: -20)
    ])

    // Add record button
    let recordButton = UIButton(type: .system)
    recordButton.setTitle("Record", for: .normal)
    recordButton.addTarget(self, action: #selector(toggleRecording), for: .touchUpInside)
    recordButton.translatesAutoresizingMaskIntoConstraints = false
    view.addSubview(recordButton)

    // Add constraints for record button
    NSLayoutConstraint.activate([
        recordButton.centerXAnchor.constraint(equalTo: view.centerXAnchor),
        recordButton.bottomAnchor.constraint(equalTo: clearButton.topAnchor, constant: -20)
    ])
}

@objc func clearCanvas() {
    canvasView.drawing = PKDrawing()
}

@objc func toggleRecording() {
    if isRecording {
        stopRecording()
    } else {
        startRecording()
    }
}

func startRecording() {
    let documentsPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
    let outputPath = "\(documentsPath)/output3.mov"
    outputFileURL = URL(fileURLWithPath: outputPath)
    videoOutput.startRecording(to: outputFileURL!, recordingDelegate: self)
    isRecording = true
}

func stopRecording() {
    videoOutput.stopRecording()
    isRecording = false

    // Save the recorded video to Firestore Storage
    if let outputFileURL = outputFileURL {
        saveVideoToFirestoreStorage(url: outputFileURL)
    }

    // Merge recorded video with canvas drawing frames
    mergeVideoWithDrawingFrames()
}

func saveVideoToFirestoreStorage(url: URL) {
    let storage = Storage.storage()
    let storageRef = storage.reference()
    let videoRef = storageRef.child("videos/\(UUID().uuidString).mov")

    videoRef.putFile(from: url, metadata: nil) { metadata, error in
        guard let _ = metadata else {
            print("Error uploading video: \(error?.localizedDescription ?? "Unknown error")")
            return
        }
        videoRef.downloadURL { (url, error) in
            if let downloadURL = url {
                print("Video uploaded successfully. Download URL: \(downloadURL)")
            } else {
                print("Error getting download URL: \(error?.localizedDescription ?? "Unknown error")")
            }
        }
    }
}

func mergeVideoWithDrawingFrames() {
    guard let outputFileURL = outputFileURL else {
        return
    }

    let composition = AVMutableComposition()
    let mainVideoAsset = AVAsset(url: outputFileURL)
    guard let videoTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid) else {
        return
    }

    do {
        try videoTrack.insertTimeRange(CMTimeRange(start: .zero, duration: mainVideoAsset.duration), of: mainVideoAsset.tracks(withMediaType: .video)[0], at: .zero)
    } catch {
        print("Error inserting video track: \(error.localizedDescription)")
        return
    }

    let imageGenerator = AVAssetImageGenerator(asset: mainVideoAsset)
    imageGenerator.appliesPreferredTrackTransform = true

    // Generate canvas drawing frames as images
    let totalFrames = Int(animationDuration * frameRate)
    var canvasSnapshotURLs: [URL] = []
    for i in 0..<totalFrames {
        let time = CMTime(seconds: Double(i) * (1.0 / frameRate), preferredTimescale: 600)
        do {
            let cgImage = try imageGenerator.copyCGImage(at: time, actualTime: nil)
            let uiImage = UIImage(cgImage: cgImage)
            let drawingFrameURL = FileManager.default.temporaryDirectory.appendingPathComponent("drawing_frame_\(i).png")
            try uiImage.pngData()?.write(to: drawingFrameURL)
            canvasSnapshotURLs.append(drawingFrameURL)
        } catch {
            print("Error generating drawing frame: \(error.localizedDescription)")
        }
    }

    // Add drawing frames to video
    let videoSize = mainVideoAsset.tracks(withMediaType: .video)[0].naturalSize
    let videoLayer = CALayer()
    videoLayer.frame = CGRect(origin: .zero, size: videoSize)

    let parentLayer = CALayer()
    parentLayer.frame = CGRect(origin: .zero, size: videoSize)
    parentLayer.addSublayer(videoLayer)

    let videoComposition = AVMutableVideoComposition()
    videoComposition.renderSize = videoSize
    videoComposition.frameDuration = CMTimeMake(value: 1, timescale: Int32(frameRate))
    videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer)

    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRange(start: .zero, duration: composition.duration)

    let videoTrackInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
    instruction.layerInstructions = [videoTrackInstruction]
    videoComposition.instructions = [instruction]

    // Create a video writer
    let videoPath = FileManager.default.temporaryDirectory.appendingPathComponent("final_video.mov")
    let videoWriter: AVAssetWriter
    do {
        videoWriter = try AVAssetWriter(outputURL: videoPath, fileType: .mov)
    } catch {
        print("Error creating AVAssetWriter: \(error.localizedDescription)")
        return
    }

    videoWriter.shouldOptimizeForNetworkUse = true
    let videoSettings: [String: Any] = [
        AVVideoCodecKey: AVVideoCodecType.h264,
        AVVideoWidthKey: videoSize.width,
        AVVideoHeightKey: videoSize.height
    ]

    guard let videoWriterInput = try? AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings) else {
        print("Error creating AVAssetWriterInput")
        return
    }

    videoWriterInput.expectsMediaDataInRealTime = false
    videoWriter.add(videoWriterInput)

    // Create a video writer adaptor
    let videoWriterPixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriterInput, sourcePixelBufferAttributes: nil)

    // Start writing process
    videoWriter.startWriting()
    videoWriter.startSession(atSourceTime: .zero)

    let queue = DispatchQueue(label: "DrawingFrameQueue")

    // Append drawing frames to video
    var frameCount = 0
    let frameDuration = CMTime(value: 1, timescale: Int32(frameRate))
    videoWriterInput.requestMediaDataWhenReady(on: queue) {
        while videoWriterInput.isReadyForMoreMediaData && frameCount < totalFrames {
            if frameCount < canvasSnapshotURLs.count {
                let presentationTime = CMTimeMultiply(frameDuration, multiplier: Int32(frameCount))
                guard let drawingFrameImage = UIImage(contentsOfFile: canvasSnapshotURLs[frameCount].path),
                      let drawingFramePixelBuffer = self.pixelBuffer(from: drawingFrameImage.cgImage!) else {
                    continue
                }
                if !videoWriterPixelBufferAdaptor.append(drawingFramePixelBuffer, withPresentationTime: presentationTime) {
                    print("Error writing drawing frame at time \(presentationTime)")
                }
            }
            frameCount += 1
        }
        videoWriterInput.markAsFinished()
        videoWriter.finishWriting {
            self.saveMergedVideo(videoPath)
        }
    }
}




                                                        func saveMergedVideo(_ videoURL: URL) {
                                                                         // Save the merged video to Firestore Storage
                                                                         let storage = Storage.storage()
                                                                         let storageRef = storage.reference()
                                                                         let videoRef = storageRef.child("merged_videos/\(UUID().uuidString).mov")

                                                                         videoRef.putFile(from: videoURL, metadata: nil) { metadata, error in
                                                                             guard let _ = metadata else {
                                                                                 print("Error uploading merged video: \(error?.localizedDescription ?? "Unknown error")")
                                                                                 return
                                                                             }
                                                                             videoRef.downloadURL { (url, error) in
                                                                                 if let downloadURL = url {
                                                                                     print("Merged video uploaded successfully. Download URL: \(downloadURL)")
                                                                                 } else {
                                                                                     print("Error getting download URL: \(error?.localizedDescription ?? "Unknown error")")
                                                                                 }
                                                                             }
                                                                         }
                                                                     }

                                                                     func pixelBuffer(from image: CGImage) -> CVPixelBuffer? {
                                                                         let width = image.width
                                                                         let height = image.height

                                                                         var pixelBuffer: CVPixelBuffer?
                                                                         let options: [String: Any] = [
                                                                             kCVPixelBufferCGImageCompatibilityKey as String: true,
                                                                             kCVPixelBufferCGBitmapContextCompatibilityKey as String: true
                                                                         ]
                                                                         let status = CVPixelBufferCreate(kCFAllocatorDefault, width, height, kCVPixelFormatType_32ARGB, options as CFDictionary, &pixelBuffer)

                                                                         guard let buffer = pixelBuffer, status == kCVReturnSuccess else {
                                                                             return nil
                                                                         }

                                                                         CVPixelBufferLockBaseAddress(buffer, [])
                                                                         let pixelData = CVPixelBufferGetBaseAddress(buffer)

                                                                         let colorSpace = CGColorSpaceCreateDeviceRGB()
                                                                         let bytesPerPixel = 4
                                                                         let bytesPerRow = bytesPerPixel * width
                                                                         let context = CGContext(data: pixelData, width: width, height: height, bitsPerComponent: 8, bytesPerRow: bytesPerRow, space: colorSpace, bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue)

                                                                         context?.draw(image, in: CGRect(x: 0, y: 0, width: width, height: height))

                                                                         CVPixelBufferUnlockBaseAddress(buffer, [])

                                                                         return buffer
                                                                     }

                                                                     override func viewDidLayoutSubviews() {
                                                                         super.viewDidLayoutSubviews()
                                                                         videoPreviewLayer.frame = view.bounds
                                                                     }
                                                                         }

                                                                 extension Test1: PKCanvasViewDelegate {
                                                                     func canvasViewDrawingDidChange(_ canvasView: PKCanvasView) {
                                                                         // Handle drawing changes if needed
                                                                     }
                                                                 }

                                                                         extension Test1: AVCaptureFileOutputRecordingDelegate {
        func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
            if let error = error {
                print("Error recording video: \(error.localizedDescription)")
            }
        }
    }

Upvotes: 0

Views: 94

Answers (0)

Related Questions