Manish

Reputation: 53

GStreamer C code fails with: streaming stopped, reason not-negotiated (-4)

I am learning GStreamer, and whatever I have achieved with the GStreamer command-line tools I am now trying to implement in a GStreamer application written in C.

The command below streamed an MP4 video file successfully:

gst-launch-1.0.exe -v filesrc location=file.mp4 ! qtdemux ! h264parse ! avdec_h264 ! videoconvert ! autovideosink

I tried the same with C code, using the demuxer's "pad-added" signal to catch the dynamically created pads and link them to the next element, i.e. the parser (h264parse).

It failed with: streaming stopped, reason not-negotiated.

Complete output:

Now playing: file.mp4
Running...
A new pad video_0 was created for demux
element demux will be linked to parser
Error: Internal data stream error.
Debug Information: ../gst/isomp4/qtdemux.c(6607): gst_qtdemux_loop (): /GstPipeline:video-play/GstQTDemux:demux:
streaming stopped, reason not-negotiated (-4)
Returned, stopping playback...
Freeing pipeline...
Completed. Goodbye!

#include <gst/gst.h>
#include <stdlib.h>
#include <string.h>

#define INPUT_FILE "file.mp4"

static gboolean bus_call(GstBus *bus, GstMessage *msg, gpointer data)
{
    GMainLoop *loop = (GMainLoop *)data;

    switch (GST_MESSAGE_TYPE(msg)) {
        gchar  *debug;
        GError *error;

    case GST_MESSAGE_EOS:
        g_print("End of stream\n");
        g_main_loop_quit(loop);
        break;

    case GST_MESSAGE_ERROR:
        gst_message_parse_error(msg, &error, &debug);

        g_printerr("Error: %s\n", error->message);
        g_printerr("Debug Information: %s\n", debug ? debug : "none");

        g_error_free(error);
        g_free(debug);  /* free the debug string only after it has been printed */

        g_main_loop_quit(loop);
        break;
    default:
        break;
    }

    return TRUE;
}


static void on_pad_added(GstElement *element, GstPad *pad, gpointer data)
{
    gchar *name;
    GstElement *parse = (GstElement *)data;

    name = gst_pad_get_name(pad);
    g_print("A new pad %s was created for %s\n", name, GST_ELEMENT_NAME(element));
    g_free(name);

    /* GST_ELEMENT_NAME avoids the string copies returned by gst_element_get_name(),
     * which would otherwise be leaked here */
    g_print("element %s will be linked to %s\n",
        GST_ELEMENT_NAME(element),
        GST_ELEMENT_NAME(parse));

    gst_element_link(element, parse);
}

int main(int argc, char *argv[])
{
    GMainLoop *loop;
    GstElement *pipeline, *source, *demux, *parser, *decoder, *sink;
    GstBus *bus;
    guint bus_watch_id;

    const gchar *input_file = INPUT_FILE;

    /* Initialization */
    gst_init(&argc, &argv);
    loop = g_main_loop_new(NULL, FALSE);

    /* Create gstreamer elements */
    pipeline = gst_pipeline_new("video-play");
    source = gst_element_factory_make("filesrc", "file-source");
    demux = gst_element_factory_make("qtdemux", "demux");
    parser = gst_element_factory_make("h264parse", "h264-parser");
    decoder = gst_element_factory_make("avdec_h264", "decoder");
    sink = gst_element_factory_make("d3dvideosink", "video-output");

    if (!pipeline || !source || !demux || !parser || !decoder || !sink) {
        g_printerr("One element could not be created. Exiting.\n");
        return -1;
    }

    /* Set input video file for source element */
    g_object_set(G_OBJECT(source), "location", input_file, NULL);

    /* we add a message handler */
    bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
    bus_watch_id = gst_bus_add_watch(bus, bus_call, loop);
    gst_object_unref(bus);

    /* Add all elements into the pipeline */
    /* pipeline---[ filesrc + qtdemux + h264parse + avdec_h264 + d3dvideosink ] */
    gst_bin_add_many(GST_BIN(pipeline), source, demux, parser, decoder, sink, NULL);

    /* Link the elements filesrc->demux together */

    if (gst_element_link(source, demux) != TRUE) {
        g_printerr("Element source->demux could not be linked.\n");
        gst_object_unref(pipeline);
        return -1;
    }
    /* h264parse -> avdec_h264 -> d3dvideosink */

    if (gst_element_link_many(parser, decoder, sink, NULL) != TRUE) {
        g_printerr("Many Elements could not be linked.\n");
        gst_object_unref(pipeline);
        return -1;
    }

    g_signal_connect(demux, "pad-added", G_CALLBACK(on_pad_added), parser);

    /* Set the pipeline to "playing" state */
    g_print("Now playing: %s\n", input_file);
    if (gst_element_set_state(pipeline,
        GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
        g_printerr("Unable to set the pipeline to the playing state.\n");
        gst_object_unref(pipeline);
        return -1;
    }

    g_print("Running...\n");
    g_main_loop_run(loop);


    /* Free resources and change state to NULL */
    g_source_remove(bus_watch_id);  /* the bus was already unreffed after adding the watch */
    g_print("Returned, stopping playback...\n");
    gst_element_set_state(pipeline, GST_STATE_NULL);
    g_print("Freeing pipeline...\n");
    gst_object_unref(GST_OBJECT(pipeline));
    g_print("Completed. Goodbye!\n");
    return 0;
}

Can you please let me know how to link these pads to the h264parse element so that the video file streams? If possible, please also explain how these pads work in the GStreamer tools and applications.

Upvotes: 4

Views: 10706

Answers (2)

shafeer v c

Reputation: 63

The command below streamed an MP4 video file successfully: gst-launch-1.0.exe -v filesrc location=file.mp4 ! qtdemux ! h264parse ! avdec_h264 ! videoconvert ! autovideosink

Ideally, your pipeline should be:

gst-launch-1.0.exe -v filesrc location=file.mp4 ! qtdemux name=d d.video_0 ! queue ! h264parse ! avdec_h264 ! videoconvert ! autovideosink
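
If you want to mirror that extra queue in the C program, it is just one more element between the demuxer and the parser. Here is a minimal sketch, assuming the pipeline, demux, parser, decoder and sink variables from the question's main() and a pad-added handler like the one shown further down (the names are placeholders, not a complete program):

/* Sketch: insert a queue between qtdemux and h264parse, mirroring the launch line above */
GstElement *queue = gst_element_factory_make("queue", "video-queue");
if (queue == NULL) {
    g_printerr("queue element could not be created.\n");
    return -1;
}
gst_bin_add(GST_BIN(pipeline), queue);

/* static chain: queue -> h264parse -> avdec_h264 -> sink */
if (!gst_element_link_many(queue, parser, decoder, sink, NULL)) {
    g_printerr("Could not link queue -> parser -> decoder -> sink.\n");
    return -1;
}

/* in the pad-added handler, link qtdemux's new video pad to the queue's
 * sink pad instead of directly to h264parse */
g_signal_connect(demux, "pad-added", G_CALLBACK(pad_added_handler), queue);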

If you inspect qtdemux (gst-inspect-1.0 qtdemux), you will notice that its SINK pad has the following caps:

Capabilities:
  video/quicktime
  video/mj2
  audio/x-m4a
  application/x-3gp
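
Those are the caps qtdemux accepts on its sink pad; the pads you actually link are the source pads it creates at runtime (typically one for video and one for audio). To see what caps each new pad carries, you can connect a small debug callback to "pad-added". A minimal sketch (the callback name is mine, not from the question):

static void print_new_pad_caps(GstElement *demux, GstPad *new_pad, gpointer user_data)
{
    /* caps may not be negotiated yet when the pad appears, so fall back to a query */
    GstCaps *caps = gst_pad_get_current_caps(new_pad);
    if (caps == NULL)
        caps = gst_pad_query_caps(new_pad, NULL);

    gchar *caps_str = gst_caps_to_string(caps);
    g_print("Pad %s has caps: %s\n", GST_PAD_NAME(new_pad), caps_str);

    g_free(caps_str);
    gst_caps_unref(caps);
}

Connect it with g_signal_connect(demux, "pad-added", G_CALLBACK(print_new_pad_caps), NULL); alongside your existing handler.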

And if you inspect h264parse (gst-inspect-1.0 h264parse), you will notice that its SINK pad, the one the demuxer's video pad must be linked to, accepts these caps:

SINK template: 'sink'
  Availability: Always
  Capabilities:
    video/x-h264

When you link the newly added src pad of qtdemux to the sink pad of h264parse, you may have to check the new pad's caps so that only the video pad gets connected to h264parse.

I have used the code below to link qtdemux to h264parse inside the "pad-added" signal handler:

static void pad_added_handler(GstElement *src, GstPad *new_pad, gpointer data) {
    GstPadLinkReturn ret;
    GstCaps *new_pad_caps = NULL;
    GstStructure *new_pad_struct = NULL;
    const gchar *new_pad_type = NULL;
    GstElement *h264parse = (GstElement *) data;

    /* Check the new pad's type */
    new_pad_caps = gst_pad_get_current_caps(new_pad);
    new_pad_struct = gst_caps_get_structure(new_pad_caps, 0);
    new_pad_type = gst_structure_get_name(new_pad_struct);

    if (g_str_has_prefix(new_pad_type, "video/x-h264")) {
        GstPad *sink_pad_video = gst_element_get_static_pad(h264parse, "sink");
        ret = gst_pad_link(new_pad, sink_pad_video);
        if (GST_PAD_LINK_FAILED(ret))
            g_printerr("Could not link the demuxer's video pad to h264parse.\n");
        gst_object_unref(sink_pad_video);
    }

    gst_caps_unref(new_pad_caps);
}

Note: You may need to add a capsfilter to select the desired video capability of your source, or simply relax the condition to: if (g_str_has_prefix(new_pad_type, "video")) {}
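
If you go the capsfilter route, it is an ordinary element configured with a caps string and placed between the demuxer's video branch and the parser. A minimal sketch under that assumption (element names are illustrative):

/* Sketch: restrict the video branch with a capsfilter, as mentioned in the note above */
GstElement *filter = gst_element_factory_make("capsfilter", "video-filter");
GstCaps *filter_caps = gst_caps_from_string("video/x-h264");

g_object_set(filter, "caps", filter_caps, NULL);
gst_caps_unref(filter_caps);

/* qtdemux (video pad) -> capsfilter -> h264parse -> ... */
gst_bin_add(GST_BIN(pipeline), filter);
if (!gst_element_link(filter, h264parse))
    g_printerr("Could not link capsfilter to h264parse.\n");

In the "pad-added" handler you would then link the demuxer's video pad to the capsfilter's sink pad instead of directly to h264parse.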

But I am not sure how adding a queue solved your problem.

Hope this helps.

Upvotes: 3

Hossein

Reputation: 95

I just edited and fixed shafeer's answer.

Here is the code that works:

static void pad_added_handler(GstElement *src, GstPad *new_pad, gpointer data) {
    GstPadLinkReturn ret;
    GstCaps *new_pad_caps = NULL;
    GstStructure *new_pad_struct = NULL;
    const gchar *new_pad_type = NULL;
    GstElement *h264parse = (GstElement *) data;

    /* Check the new pad's type */
    new_pad_caps = gst_pad_get_current_caps(new_pad);
    new_pad_struct = gst_caps_get_structure(new_pad_caps, 0);
    new_pad_type = gst_structure_get_name(new_pad_struct);

    if (g_str_has_prefix(new_pad_type, "video/x-h264")) {
        GstPad *sink_pad_video = gst_element_get_static_pad(h264parse, "sink");
        ret = gst_pad_link(new_pad, sink_pad_video);
        gst_object_unref(sink_pad_video);
    }

    gst_caps_unref(new_pad_caps);
}

Upvotes: 0
