TheNeuronalCoder

Reputation: 33

How do you specify the container format in FFmpeg?

I am trying to encode an MP4 video, but the result doesn't open on my MacBook: the stream uses the supported H.264 codec, yet the container format is not MP4. So all I ask is how you would go about specifying the container format, so that I can generate video that is actually playable without falling back to ffplay. Here is the ffprobe output, followed by my recording setup:

ffprobe version N-101948-g870bfe1 Copyright (c) 2007-2021 the FFmpeg developers
  built with Apple LLVM version 10.0.1 (clang-1001.0.46.4)
  configuration: --disable-asm --enable-shared --enable-libx264 --enable-gpl
  libavutil      56. 72.100 / 56. 72.100
  libavcodec     58.136.101 / 58.136.101
  libavformat    58. 78.100 / 58. 78.100
  libavdevice    58. 14.100 / 58. 14.100
  libavfilter     7.111.100 /  7.111.100
  libswscale      5. 10.100 /  5. 10.100
  libswresample   3. 10.100 /  3. 10.100
  libpostproc    55. 10.100 / 55. 10.100
Input #0, h264, from 'animation.mp4':
  Duration: N/A, bitrate: N/A
  Stream #0:0: Video: h264 (High), yuv420p(progressive), 1920x1080, 24.33 fps, 24 tbr, 1200k tbn, 48 tbc
void imagine::Camera::Record() {
  if (recording_)
    throw std::runtime_error(
      "you must close your camera before starting another recording"
    );
  recording_ = true;

  output_file_ = std::fopen(output_.c_str(), "wb");
  if (!output_file_)
    throw std::runtime_error("failed to open file");

  AVCodec* codec = avcodec_find_encoder(AV_CODEC_ID_H264);
  if (!codec)
    throw std::runtime_error("failed to find codec");

  context_ = avcodec_alloc_context3(codec);
  if (!context_)
    throw std::runtime_error("failed to allocate video codec context");

  packet_ = av_packet_alloc();
  if (!packet_)
    throw std::runtime_error("failed to allocate video packet");

  py::tuple size = py::globals()["main_scene"].attr("size");
  context_->width = size[0].cast<int>();
  context_->height = size[1].cast<int>();
  context_->bit_rate = 0.4 * fps_ * context_->width * context_->height;
  context_->time_base = (AVRational){ 1, fps_ };
  context_->framerate = (AVRational){ fps_, 1 };
  context_->gop_size = 10;
  context_->max_b_frames = 1;
  context_->pix_fmt = AV_PIX_FMT_YUV420P;

  if (avcodec_open2(context_, codec, NULL) < 0)
    throw std::runtime_error("failed to open codec");

  frame_ = av_frame_alloc();
  if (!frame_)
    throw std::runtime_error("failed to allocate video frame");

  frame_->width  = context_->width;
  frame_->height = context_->height;
  frame_->format = AV_PIX_FMT_YUV420P;

  if (av_frame_get_buffer(frame_, 0) < 0)
    throw std::runtime_error("failed to allocate the video frame data");
}
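For context, the frame-writing side is essentially the standard avcodec_send_frame / avcodec_receive_packet loop, with each packet written straight to output_file_, along these lines (simplified):

// Simplified: push a frame into the encoder, then write out every packet
// it produces. Passing nullptr as the frame flushes the encoder at the end.
if (avcodec_send_frame(context_, frame_) < 0)
  throw std::runtime_error("failed to send frame to encoder");

while (true) {
  int ret = avcodec_receive_packet(context_, packet_);
  if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
    break;
  if (ret < 0)
    throw std::runtime_error("failed to receive packet from encoder");

  // The packet bytes go straight into the file, so the result is a raw
  // H.264 elementary stream rather than an MP4 container.
  std::fwrite(packet_->data, 1, packet_->size, output_file_);
  av_packet_unref(packet_);
}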

Upvotes: 1

Views: 1093

Answers (1)

BubLblckZ

Reputation: 463

You can encode the video into a temporary file and then, once you have finished writing, remux it from a raw H.264 stream into an MP4 container. Here's a function based on the remuxing example in the FFmpeg documentation:

int Remux(const char* in_filename, const char* out_filename)
{
    AVOutputFormat* ofmt = nullptr;
    AVFormatContext* ifmt_ctx = nullptr, *ofmt_ctx = nullptr;

    // av_read_frame() initialises this packet on each successful read.
    AVPacket pkt2;

    int ret;

    int stream_index = 0;
    int* stream_mapping = nullptr;
    int stream_mapping_size = 0;

    if (avformat_open_input(&ifmt_ctx, in_filename, nullptr, nullptr) < 0) {
        std::cout << "Could not open input file" << std::endl;
        return 0;
    }

    if (avformat_find_stream_info(ifmt_ctx, nullptr) < 0) {
        std::cout << "Failed to retrieve input stream information" << std::endl;
        return 0;
    }

    av_dump_format(ifmt_ctx, 0, in_filename, 0);

    avformat_alloc_output_context2(&ofmt_ctx, nullptr, nullptr, out_filename);
    if (!ofmt_ctx) {
        std::cout << "Could not create output context" << std::endl;
        return 0;
    }

    stream_mapping_size = ifmt_ctx->nb_streams;

    stream_mapping = new int[stream_mapping_size];

    ofmt = ofmt_ctx->oformat;

    for (unsigned int i = 0; i < ifmt_ctx->nb_streams; i++) {
        AVStream* out_stream;
        AVStream* in_stream = ifmt_ctx->streams[i];
        AVCodecParameters* in_codecpar = in_stream->codecpar;

        if (in_codecpar->codec_type != AVMEDIA_TYPE_VIDEO && in_codecpar->codec_type != AVMEDIA_TYPE_AUDIO && in_codecpar->codec_type != AVMEDIA_TYPE_SUBTITLE) {
            stream_mapping[i] = -1;
            continue;
        }

        stream_mapping[i] = stream_index++;

        out_stream = avformat_new_stream(ofmt_ctx, nullptr);
        if (!out_stream) {
            std::cout << "Failed to allocate memory to output stream" << std::endl;
            return 0;
        }

        ret = avcodec_parameters_copy(out_stream->codecpar, in_codecpar);
        if (ret < 0) {
            std::cout << "Failed to copy codec parameters" << std::endl;
            return 0;
        }

        // Let the muxer pick an appropriate codec tag for the output container.
        out_stream->codecpar->codec_tag = 0;
    }
    av_dump_format(ofmt_ctx, 0, out_filename, 1);

    if (!(ofmt->flags & AVFMT_NOFILE)) {
        ret = avio_open(&ofmt_ctx->pb, out_filename, AVIO_FLAG_WRITE);
        if (ret < 0) {
            std::cout << "Could not open output file" << std::endl;
            return 0;
        }
    }

    ret = avformat_write_header(ofmt_ctx, nullptr);
    if (ret < 0) {
        std::cout << "Error writing output file header" << std::endl;
        return 0;
    }

    // Raw H.264 input carries no usable pts/dts, so synthesise timestamps
    // by accumulating packet durations in the output time base.
    int64_t ii = 0;
    while (true) {
        AVStream* in_stream, * out_stream;
        ret = av_read_frame(ifmt_ctx, &pkt2);
        if (ret < 0) {
            break;
        }

        in_stream = ifmt_ctx->streams[pkt2.stream_index];
        if (pkt2.stream_index >= stream_mapping_size || stream_mapping[pkt2.stream_index] < 0) {
            av_packet_unref(&pkt2);
            continue;
        }

        pkt2.stream_index = stream_mapping[pkt2.stream_index];

        out_stream = ofmt_ctx->streams[pkt2.stream_index];

        pkt2.duration = av_rescale_q(pkt2.duration, in_stream->time_base, out_stream->time_base);
        pkt2.pts = ii;
        pkt2.dts = ii;
        ii += pkt2.duration;
        pkt2.pos = -1;

        ret = av_interleaved_write_frame(ofmt_ctx, &pkt2);
        if (ret < 0) {
            std::cout << "Error muxing packet" << std::endl;
            break;
        }
        av_packet_unref(&pkt2);
    }


    av_write_trailer(ofmt_ctx);

    avformat_close_input(&ifmt_ctx);

    if (ofmt_ctx && !(ofmt->flags & AVFMT_NOFILE)) {
        avio_closep(&ofmt_ctx->pb);
    }
    avformat_free_context(ofmt_ctx);

    delete[] stream_mapping;

    return 0;
}
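Once the recording is finished and the raw stream has been flushed to disk, the call is just (the temporary .h264 filename here is only an illustration):

// Encode into a temporary raw stream first, then remux it into the MP4
// the player expects. Remux() logs any failure to stdout.
Remux("animation.h264", "animation.mp4");

You can check the same idea from the command line with something like ffmpeg -framerate 24 -i animation.h264 -c copy animation.mp4, which performs the equivalent stream copy into an MP4 container.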

Upvotes: 1
