I've created a C++ gstreamer application that loads raw 8-bit BGR frames into the pipeline through the appsrc element. The frames are generated via AirSim's MultirotorRpcLibClient::simGetImages() function. Note that AirSim's documentation describes the frames as RGB, but the implementation actually produces BGR.
simGetImages() returns a struct with a property that holds a uint8 vector. The default frame dimensions are 256 pixels wide by 144 pixels high. Each pixel has three channels, so the total size is 110592 bytes.
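For context, getOneImage() (used further below) wraps this call. A minimal sketch of what it does, assuming the default camera "0" and an uncompressed Scene capture (both assumptions, since the original helper isn't shown):

#include "vehicles/multirotor/api/MultirotorRpcLibClient.hpp"

std::vector<uint8_t> getOneImage(unsigned long frame_count) {
    // frame_count is unused in this sketch
    static msr::airlib::MultirotorRpcLibClient client;
    using ImageRequest = msr::airlib::ImageCaptureBase::ImageRequest;
    using ImageResponse = msr::airlib::ImageCaptureBase::ImageResponse;
    using ImageType = msr::airlib::ImageCaptureBase::ImageType;
    // request one uncompressed scene image (BGR bytes despite the RGB naming)
    std::vector<ImageRequest> requests = { ImageRequest("0", ImageType::Scene, false, false) };
    const std::vector<ImageResponse> responses = client.simGetImages(requests);
    // image_data_uint8 holds width * height * 3 bytes
    return responses.at(0).image_data_uint8;
}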
I've validated the raw BGR data generated by AirSim: every frame arrives with the correct total size and valid values in each pixel channel. The camera is stationary, so consecutive frames should have similar values at each pixel. This holds across the various pixel ranges I spot-checked, which suggests the raw image data generated by AirSim isn't shifting around.
My pipeline is: appsrc -> queue -> rawvideoparse -> videoconvert -> autovideosink (in the trimmed code below, appsrc is linked straight to queue and autovideosink).
The raw BGR bytes are loaded into a buffer and pushed to appsrc. Each buffer contains one full frame.
The pipeline does stream the frames to autovideosink, but they are often shifted left or distorted. Is there an issue with the way data is being loaded into the buffer and sent to appsrc? Alternatively, are there additional caps that could be specified to help gstreamer correctly render the frames?
Here is my current pipeline code with some checks removed for brevity. The frame rate is set to 1 Hz to make it easier to inspect each frame.
typedef struct _PipelineData {
    GstElement *pipeline, *app_source, *app_sink, *queue_0;
    GMainLoop *main_loop; /* GLib's Main Loop */
} PipelineData;
static int runGstreamer(int *argc, char **argv[], PipelineData *data) {
    GstBus *bus;
    guint bus_watch_id;

    // initialize gstreamer
    gst_init(argc, argv);

    // create the elements
    data->app_source = gst_element_factory_make("appsrc", "video_source");
    data->queue_0 = gst_element_factory_make("queue", "queue_0");
    data->app_sink = gst_element_factory_make("autovideosink", "video_sink");

    // create empty pipeline
    data->pipeline = gst_pipeline_new("video-pipeline");

    // element configuration: buffers are timestamped in time units and the source is live
    g_object_set(G_OBJECT(data->app_source),
                 "format", GST_FORMAT_TIME,
                 "is-live", TRUE,
                 NULL);

    // add elements to the pipeline
    gst_bin_add_many(GST_BIN(data->pipeline),
                     data->app_source,
                     data->queue_0,
                     data->app_sink,
                     NULL);

    // describe the raw frames appsrc will produce
    GstCaps *caps_source = gst_caps_new_simple("video/x-raw",
                                               "format", G_TYPE_STRING, "BGR",
                                               "framerate", GST_TYPE_FRACTION, 1, 1,
                                               "width", G_TYPE_INT, 256,
                                               "height", G_TYPE_INT, 144,
                                               NULL);
    if (!gst_element_link_filtered(data->app_source, data->queue_0, caps_source)) {
        g_printerr("Elements app_source and queue_0 could not be linked.\n");
        gst_object_unref(data->pipeline);
        return -1;
    }
    gst_caps_unref(caps_source);

    if (gst_element_link_many(data->queue_0, data->app_sink, NULL) != TRUE) {
        g_printerr("Elements could not be linked.\n");
        gst_object_unref(data->pipeline);
        return -1;
    }

    // start playing the pipeline
    gst_element_set_state(data->pipeline, GST_STATE_PLAYING);

    // create the main loop and add a bus message handler
    data->main_loop = g_main_loop_new(NULL, FALSE);
    bus = gst_pipeline_get_bus(GST_PIPELINE(data->pipeline));
    bus_watch_id = gst_bus_add_watch(bus, bus_call, data->main_loop);
    gst_object_unref(bus);
    g_main_loop_run(data->main_loop);

    // free resources
    gst_element_set_state(data->pipeline, GST_STATE_NULL);
    gst_object_unref(data->pipeline);
    return 0;
}
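bus_call is not shown above; a minimal handler in the style of the standard GStreamer examples (an assumption about what the omitted function does, since it only needs to quit the loop on end-of-stream or error) would be:

static gboolean bus_call(GstBus *bus, GstMessage *msg, gpointer data) {
    GMainLoop *loop = (GMainLoop *) data;
    switch (GST_MESSAGE_TYPE(msg)) {
        case GST_MESSAGE_EOS:
            g_print("End of stream\n");
            g_main_loop_quit(loop);
            break;
        case GST_MESSAGE_ERROR: {
            gchar *debug;
            GError *error;
            gst_message_parse_error(msg, &error, &debug);
            g_free(debug);
            g_printerr("Error: %s\n", error->message);
            g_error_free(error);
            g_main_loop_quit(loop);
            break;
        }
        default:
            break;
    }
    return TRUE;
}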
Here is the function where the image data is loaded into an appsrc buffer:
static void sendImageStream(PipelineData *pipelineData, int fps) {
    printf("Milliseconds between frames: %d\n", (int)((1 / (float) fps) * 1e3));
    unsigned long frame_count = 1;
    while (1) {
        std::vector<uint8_t> newImage = getOneImage(frame_count);
        // check that the appsrc element has been created by the gstreamer thread before using it
        if (pipelineData->app_source) {
            GstBuffer *buffer;
            GstMapInfo map;
            GstFlowReturn ret;
            // create a buffer and allocate memory for one frame
            buffer = gst_buffer_new_allocate(NULL, (gint)newImage.size(), NULL);
            // set the image presentation timestamp in nanoseconds
            GST_BUFFER_TIMESTAMP(buffer) = frame_count * 1e9 / fps;
            // map the buffer's memory blocks for writing
            gst_buffer_map(buffer, &map, GST_MAP_WRITE);
            map.data = newImage.data();
            map.size = newImage.size();
            ret = gst_app_src_push_buffer(GST_APP_SRC(pipelineData->app_source), buffer);
            // release the mapping that was associated with the buffer
            gst_buffer_unmap(buffer, &map);
            // see the GstFlowReturn enum for error values
            if (ret != GST_FLOW_OK) {
                g_print("\nPush appsrc buffer flow error: %d\n", ret);
            }
        }
        else {
            std::cout << "AppSrc element not yet created - image skipped" << std::endl;
        }
        // std::cout << "\nImage uint8 size: " << newImage.size() << std::endl;
        std::this_thread::sleep_for(std::chrono::milliseconds((int)((1 / (float) fps) * 1e3)));
        frame_count++;
    }
}
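As an aside, the timestamp arithmetic above goes through floating point; GStreamer's integer scaling helper is the usual way to write it (an optional tidy-up, unrelated to the distortion):

GST_BUFFER_PTS(buffer) = gst_util_uint64_scale(frame_count, GST_SECOND, fps);
GST_BUFFER_DURATION(buffer) = gst_util_uint64_scale(1, GST_SECOND, fps);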
Finally, here is main where the gstreamer pipeline is run and the images are loaded into appsrc in a separate thread:
int main(int argc, char *argv[]) {
    PipelineData data = {};
    std::thread feedAppSrc(sendImageStream, &data, 1);
    int pipelineStatus = runGstreamer(&argc, &argv, &data);
    if (!pipelineStatus) {
        feedAppSrc.join();
    }
    if (pipelineStatus) {
        std::cout << "\nPipeline failed to run: terminating feedAppSrc and the program" << std::endl;
    }
    return pipelineStatus;
}
Answer:
The issue was with the way the raw image bytes were being added to the buffer via the GstMapInfo map object. newImage.data() returns a pointer to the underlying array of the newImage vector, so

map.data = newImage.data();

was merely overwriting the map's pointer to the gstreamer buffer's memory with the address of newImage's storage; the buffer's own memory was never written.
Instead, the data stored at newImage.data() needed to be copied into the buffer's memory:
memcpy(map.data, newImage.data(), newImage.size());
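With that change, the buffer-filling portion of sendImageStream becomes (surrounding lines repeated for context, and the unmap moved before the push since gst_app_src_push_buffer takes ownership of the buffer):

buffer = gst_buffer_new_allocate(NULL, (gint)newImage.size(), NULL);
GST_BUFFER_TIMESTAMP(buffer) = frame_count * 1e9 / fps;
gst_buffer_map(buffer, &map, GST_MAP_WRITE);
// copy the frame bytes into the buffer's own memory
memcpy(map.data, newImage.data(), newImage.size());
gst_buffer_unmap(buffer, &map);
ret = gst_app_src_push_buffer(GST_APP_SRC(pipelineData->app_source), buffer);

Alternatively, gst_buffer_fill() performs the map, copy, and unmap in a single call:

gst_buffer_fill(buffer, 0, newImage.data(), newImage.size());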