ANANDA SANTRA
ANANDA SANTRA

Reputation: 9

Getting the error "Stop() called but track is not started or stopped" while merging a photo with an existing video using MediaMuxer

What is wrong in my code? Please help. Here is the complete code; at the end of execution it says "Stop() called but track is not started or stopped"

and the file at the output path is corrupted and does not contain the merged video.

/**
 * Attempts to overlay [imagePath] onto every frame of the video at [videoPath]
 * and mux the result (plus the original audio track) into `output`.
 *
 * NOTE(review): as written this cannot produce a valid file — see the inline
 * review comments. The muxer error "Stop() called but track is not started or
 * stopped" is triggered by the defects flagged below.
 */
private fun mergeImageWithVideo(videoPath: String, imagePath: String) {

    val mediaMetadataRetriever = MediaMetadataRetriever()
    mediaMetadataRetriever.setDataSource(videoPath)
    // Total frame count and frame rate drive the extraction loop below.
    // METADATA_KEY_CAPTURE_FRAMERATE may be absent; 30 fps is assumed then.
    val videoFrameCount =
        mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_FRAME_COUNT)
            ?.toInt() ?: 0
    val videoFrameRate =
        mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_CAPTURE_FRAMERATE)
            ?.toFloat() ?: 30.0f

    // Load your overlay image
    val overlayBitmap = BitmapFactory.decodeFile(imagePath)
    val videoExtractor = MediaExtractor()
    videoExtractor.setDataSource(videoPath)


    // Prepare the output video
    // NOTE(review): `output` is not defined in this function — presumably a
    // class-level File property; confirm it points at a writable .mp4 path.
    showELog("gsfdjhagfhjua ext ${output.extension}")
    val fileOutputStream = FileOutputStream(output.absolutePath)
    val fileDescriptor = fileOutputStream.fd
    val mediaMuxer =MediaMuxer(fileDescriptor,MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4)
    // BUG: an audio track is registered here, but no audio samples are ever
    // written to it below. MediaMuxer.stop() fails when a registered track
    // received no data — this is the direct cause of the reported error.
    val audioTrackIndex = getAudioTrackIndex(videoExtractor)
    val audioFormat = videoExtractor.getTrackFormat(audioTrackIndex)
    val videoTrackIndex3 = mediaMuxer.addTrack(audioFormat)

    val videoTrackIndex = getVideoTrackIndex(videoExtractor)
    val videoFormat = videoExtractor.getTrackFormat(videoTrackIndex)
    val videoTrackIndex2 = mediaMuxer.addTrack(videoFormat)
   
    mediaMuxer.start()

    // NOTE(review): start() happens synchronously but the writing/stop()
    // happens inside a coroutine — any exception in the loop skips stop()
    // and leaks the muxer, retriever, and stream (no try/finally or use {}).
    lifecycleScope.launch() {
        val tim = withContext(Dispatchers.IO) {


            // Iterate through video frames
            // Frame interval in microseconds (presentation timestamps are in us).
            val frameInterval = (1000000.0 / videoFrameRate).toLong()

            for (i in 0 until videoFrameCount) {
                val frameTime = i * frameInterval
                // NOTE(review): OPTION_CLOSEST_SYNC returns only sync (I)
                // frames, so many distinct frameTime values will yield the
                // same repeated frame — confirm OPTION_CLOSEST was intended.
                val videoFrame = mediaMetadataRetriever.getFrameAtTime(
                    frameTime,
                    MediaMetadataRetriever.OPTION_CLOSEST_SYNC
                )
                val overlayedImage = overlayImageOnFrame(videoFrame!!, overlayBitmap)
                showELog("sdagagea  $overlayedImage")
                // Add the modified frame to the output video
                // BUG: this writes raw, uncompressed Bitmap pixels into a track
                // whose MediaFormat was copied from the source's encoded video
                // track. MediaMuxer does not encode; each frame must first go
                // through a MediaCodec encoder matching the declared format.
                val buffer = ByteBuffer.allocate(overlayedImage.byteCount)
                overlayedImage.copyPixelsToBuffer(buffer)
                buffer.rewind()
                val bufferInfo = MediaCodec.BufferInfo()
                bufferInfo.presentationTimeUs = frameTime
                bufferInfo.size = buffer.remaining()
                mediaMuxer.writeSampleData(videoTrackIndex2, buffer, bufferInfo)

            }


            // Release resources

        }
        mediaMuxer.stop()
        mediaMuxer.release()
        mediaMetadataRetriever.release()
        fileOutputStream.close()

        // NOTE(review): this bare expression has no effect — it was probably
        // meant to be the value of `tim` / assigned to shareFile.
        output.absolutePath

        showELog("agdafgerag ${output.absolutePath}")
        //shareFile=tim
        // findNavController().navigateSafe(R.id.action_checkUploadedVideoFragment_to_uploadedVideoFragment,null)
        //showELog("testing new merge code ${output.absolutePath}")
    }

}

/**
 * Draws [overlayBitmap] on top of [videoFrame] and returns the composited bitmap.
 *
 * The overlay is scaled to the frame's dimensions when they differ; otherwise it
 * is drawn as-is. The input bitmaps are not modified.
 *
 * (Original had two branches duplicating the compositing code; deduplicated so
 * only the overlay selection differs — behavior is unchanged.)
 */
private fun overlayImageOnFrame(videoFrame: Bitmap, overlayBitmap: Bitmap): Bitmap {
    // Resize the overlay only if its size does not already match the frame.
    val overlay =
        if (videoFrame.width != overlayBitmap.width || videoFrame.height != overlayBitmap.height) {
            Bitmap.createScaledBitmap(overlayBitmap, videoFrame.width, videoFrame.height, false)
        } else {
            overlayBitmap
        }
    val resultBitmap = Bitmap.createBitmap(videoFrame.width, videoFrame.height, videoFrame.config)
    Canvas(resultBitmap).apply {
        drawBitmap(videoFrame, 0f, 0f, null)
        drawBitmap(overlay, 0f, 0f, null)
    }
    return resultBitmap
}

/**
 * Returns the index of the first video track in [extractor].
 *
 * @throws RuntimeException if the source contains no video track.
 */
private fun getVideoTrackIndex(extractor: MediaExtractor): Int =
    findTrackIndex(extractor, "video/") ?: throw RuntimeException("Video track not found")

/**
 * Returns the index of the first audio track in [extractor].
 *
 * @throws RuntimeException if the source contains no audio track.
 */
private fun getAudioTrackIndex(extractor: MediaExtractor): Int =
    findTrackIndex(extractor, "audio/") ?: throw RuntimeException("Audio track not found")

/**
 * Scans [extractor]'s tracks and returns the index of the first one whose MIME
 * type starts with [mimePrefix], or null when no such track exists.
 * (Shared helper extracted from the two near-identical lookup functions.)
 */
private fun findTrackIndex(extractor: MediaExtractor, mimePrefix: String): Int? {
    for (i in 0 until extractor.trackCount) {
        val mime = extractor.getTrackFormat(i).getString(MediaFormat.KEY_MIME)
        if (mime?.startsWith(mimePrefix) == true) {
            return i
        }
    }
    return null
}

I am trying to merge an image with an existing video. I want to get a merged video with the original audio.

Upvotes: -1

Views: 398

Answers (1)

dev.bmax
dev.bmax

Reputation: 10601

There are multiple issues with your code:

  1. You added an audio track to the MediaMuxer, but you are not writing any samples for it. This is probably the reason for the error.
  2. You are reading video frames with MediaMetadataRetriever and OPTION_CLOSEST_SYNC. This way you can get only I-frames (usually one frame per second or longer).
  3. You are not encoding the video frames prior to writing them to the MediaMuxer although the declared video format probably implies some compression.

You can do both decoding and encoding using the MediaCodec API.

Upvotes: 2

Related Questions