Hi,
I am using GStreamer to play recorded video with audio on a Colibri iMX6DL module, cross-compiling the code below:
#include <gst/gst.h>
void on_pad_added(GstElement *src, GstPad *new_pad, GstElement *pipeline);
int main(int argc, char *argv[]) {
GstElement *pipeline, *filesrc, *decodebin, *video_queue, *v4l2convert, *videosink, *audio_queue, *audioconvert, *audioresample, *audiosink;
GstBus *bus;
GstMessage *msg;
// Initialize GStreamer
gst_init(&argc, &argv);
// Create the elements
filesrc = gst_element_factory_make("filesrc", "file-source");
decodebin = gst_element_factory_make("decodebin", "decoder");
video_queue = gst_element_factory_make("queue", "video-queue");
v4l2convert = gst_element_factory_make("v4l2convert", "v4l2-converter");
videosink = gst_element_factory_make("autovideosink", "video-output");
audio_queue = gst_element_factory_make("queue", "audio-queue");
audioconvert = gst_element_factory_make("audioconvert", "audio-converter");
audioresample = gst_element_factory_make("audioresample", "audio-resampler");
audiosink = gst_element_factory_make("autoaudiosink", "audio-output");
// Check if elements are created properly
if (!filesrc || !decodebin || !video_queue || !v4l2convert || !videosink || !audio_queue || !audioconvert || !audioresample || !audiosink) {
g_printerr("Not all elements could be created.\n");
return -1;
}
// Create the empty pipeline
pipeline = gst_pipeline_new("video-audio-player");
if (!pipeline) {
g_printerr("Pipeline could not be created.\n");
return -1;
}
// Set the source file location
g_object_set(G_OBJECT(filesrc), "location", "video.mp4", NULL);
// Build the pipeline
gst_bin_add_many(GST_BIN(pipeline), filesrc, decodebin, video_queue, v4l2convert, videosink, audio_queue, audioconvert, audioresample, audiosink, NULL);
// Link the file source to the decoder
if (!gst_element_link(filesrc, decodebin)) {
g_printerr("File source and decoder could not be linked.\n");
gst_object_unref(pipeline);
return -1;
}
// Link the video elements (the actual linking happens in the decodebin's pad-added signal handler)
if (!gst_element_link_many(video_queue, v4l2convert, videosink, NULL)) {
g_printerr("Video elements could not be linked.\n");
gst_object_unref(pipeline);
return -1;
}
// Link the audio elements (the actual linking happens in the decodebin's pad-added signal handler)
if (!gst_element_link_many(audio_queue, audioconvert, audioresample, audiosink, NULL)) {
g_printerr("Audio elements could not be linked.\n");
gst_object_unref(pipeline);
return -1;
}
// Connect to the pad-added signal of decodebin
g_signal_connect(decodebin, "pad-added", G_CALLBACK(on_pad_added), pipeline);
// Start playing
gst_element_set_state(pipeline, GST_STATE_PLAYING);
// Wait until error or EOS (End of Stream)
bus = gst_element_get_bus(pipeline);
msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
// Parse message
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE(msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error(msg, &err, &debug_info);
g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME(msg->src), err->message);
g_printerr("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error(&err);
g_free(debug_info);
break;
case GST_MESSAGE_EOS:
g_print("End-Of-Stream reached.\n");
break;
default:
g_printerr("Unexpected message received.\n");
break;
}
gst_message_unref(msg);
}
// Free resources
gst_object_unref(bus);
gst_element_set_state(pipeline, GST_STATE_NULL);
gst_object_unref(pipeline);
return 0;
}
void on_pad_added(GstElement *src, GstPad *new_pad, GstElement *pipeline) {
/* gst_bin_get_by_name() returns a new reference, so unref the elements after taking their sink pads */
GstElement *vq = gst_bin_get_by_name(GST_BIN(pipeline), "video-queue");
GstElement *aq = gst_bin_get_by_name(GST_BIN(pipeline), "audio-queue");
GstPad *sink_pad_video = gst_element_get_static_pad(vq, "sink");
GstPad *sink_pad_audio = gst_element_get_static_pad(aq, "sink");
gst_object_unref(vq);
gst_object_unref(aq);
GstPadLinkReturn ret;
GstCaps *new_pad_caps = NULL;
GstStructure *new_pad_struct = NULL;
const gchar *new_pad_type = NULL;
g_print("Received new pad '%s' from '%s':\n", GST_PAD_NAME(new_pad), GST_ELEMENT_NAME(src));
// Check the new pad's type
new_pad_caps = gst_pad_get_current_caps(new_pad);
new_pad_struct = gst_caps_get_structure(new_pad_caps, 0);
new_pad_type = gst_structure_get_name(new_pad_struct);
if (g_str_has_prefix(new_pad_type, "video/x-raw")) {
// Attempt the link for video
ret = gst_pad_link(new_pad, sink_pad_video);
if (GST_PAD_LINK_FAILED(ret)) {
g_printerr("Type is '%s' but link failed for video.\n", new_pad_type);
} else {
g_print("Link succeeded (type '%s' for video).\n", new_pad_type);
}
} else if (g_str_has_prefix(new_pad_type, "audio/x-raw")) {
// Attempt the link for audio
ret = gst_pad_link(new_pad, sink_pad_audio);
if (GST_PAD_LINK_FAILED(ret)) {
g_printerr("Type is '%s' but link failed for audio.\n", new_pad_type);
} else {
g_print("Link succeeded (type '%s' for audio).\n", new_pad_type);
}
} else {
g_print("It has type '%s' which is not handled. Ignoring.\n", new_pad_type);
}
if (new_pad_caps != NULL) {
gst_caps_unref(new_pad_caps);
}
gst_object_unref(sink_pad_video);
gst_object_unref(sink_pad_audio);
}
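For completeness, this is roughly how such a file builds (a sketch only: the file name player.c is hypothetical, and for the Colibri target I source the Yocto SDK environment first so that $CC and pkg-config point at the cross toolchain instead of gcc):

# Native build sketch; with the Yocto SDK sourced, use $CC instead of gcc
gcc player.c -o player $(pkg-config --cflags --libs gstreamer-1.0)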
Now I need to add a UI to play, pause, and resume the video. For this I am using GTK3. Taking the logic above as a reference, I created a sample application that embeds GStreamer in a GTK3 window using gtkglsink, glsinkbin, and gtksink. Here is the code for that application:
#include <string.h>
#include <gtk/gtk.h>
#include <gst/gst.h>
#include <gdk/gdk.h>
int play_flag = 0;
/* Structure to contain all our information, so we can pass it around */
typedef struct _CustomData {
    GstElement *pipeline;           /* Our pipeline */
    GtkWidget *sink_widget;         /* Widget where our video will be displayed */
    GtkWidget *slider;              /* Slider widget to keep track of current position */
    GtkWidget *streams_list;
    gulong slider_update_signal_id; /* Signal ID for the slider update signal */
    GstState state;                 /* Current state of the pipeline */
    gint64 duration;                /* Duration of the clip, in nanoseconds */
} CustomData;

GstElement *filesrc, *decodebin, *video_queue, *v4l2convert, *videosink, *audio_queue, *audioconvert, *audioresample, *audiosink;
/* This function is called when the PLAY button is clicked */
static void play_cb(GtkButton *button, CustomData *data) {
    /* After EOS we go through READY first so playback restarts from the beginning */
    if (play_flag == 1) {
        gst_element_set_state(data->pipeline, GST_STATE_READY);
        play_flag = 0;
    }
    gst_element_set_state(data->pipeline, GST_STATE_PLAYING);
}

/* This function is called when the PAUSE button is clicked */
static void pause_cb(GtkButton *button, CustomData *data) {
    gst_element_set_state(data->pipeline, GST_STATE_PAUSED);
}

/* This function is called when the STOP button is clicked */
static void stop_cb(GtkButton *button, CustomData *data) {
    gst_element_set_state(data->pipeline, GST_STATE_READY);
}

/* This function is called when the main window is closed */
static void delete_event_cb(GtkWidget *widget, GdkEvent *event, CustomData *data) {
    stop_cb(NULL, data);
    gtk_main_quit();
}

/* This function is called when the slider changes its position. We perform a seek to the new position here. */
static void slider_cb(GtkRange *range, CustomData *data) {
    gdouble value = gtk_range_get_value(GTK_RANGE(data->slider));
    gst_element_seek_simple(data->pipeline, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT,
                            (gint64)(value * GST_SECOND));
}
/* Dynamic pad handler to link decodebin pads */
static void pad_added_handler(GstElement *src, GstPad *new_pad, CustomData *data) {
    GstPad *sink_pad = NULL;
    GstCaps *new_pad_caps = NULL;
    GstStructure *new_pad_struct = NULL;
    const gchar *new_pad_type = NULL;

    new_pad_caps = gst_pad_get_current_caps(new_pad);
    new_pad_struct = gst_caps_get_structure(new_pad_caps, 0);
    new_pad_type = gst_structure_get_name(new_pad_struct);

    if (g_str_has_prefix(new_pad_type, "video/x-raw")) {
        sink_pad = gst_element_get_static_pad(video_queue, "sink");
    } else if (g_str_has_prefix(new_pad_type, "audio/x-raw")) {
        sink_pad = gst_element_get_static_pad(audio_queue, "sink");
    }

    /* Ignore pad types we do not handle (sink_pad stays NULL for them) and pads already linked */
    if (sink_pad == NULL || gst_pad_is_linked(sink_pad)) {
        if (sink_pad != NULL)
            gst_object_unref(sink_pad);
        if (new_pad_caps != NULL)
            gst_caps_unref(new_pad_caps);
        return;
    }

    if (gst_pad_link(new_pad, sink_pad) != GST_PAD_LINK_OK) {
        g_printerr("Type is '%s' but link failed.\n", new_pad_type);
    } else {
        g_print("Link succeeded (type '%s').\n", new_pad_type);
    }

    if (new_pad_caps != NULL)
        gst_caps_unref(new_pad_caps);
    gst_object_unref(sink_pad);
}
/* This creates all the GTK+ widgets that compose our application, and registers the callbacks */
static void create_ui(CustomData *data) {
    GtkWidget *main_window; /* The uppermost window, containing all other windows */
    GtkWidget *main_box;    /* VBox to hold main_hbox and the controls */
    GtkWidget *main_hbox;   /* HBox to hold the video sink and the stream info text widget */
    GtkWidget *controls;    /* HBox to hold the buttons and the slider */
    GtkWidget *play_button, *pause_button, *stop_button; /* Buttons */

    main_window = gtk_window_new(GTK_WINDOW_TOPLEVEL);
    g_signal_connect(G_OBJECT(main_window), "delete-event", G_CALLBACK(delete_event_cb), data);

    play_button = gtk_button_new_from_icon_name("media-playback-start", GTK_ICON_SIZE_SMALL_TOOLBAR);
    g_signal_connect(G_OBJECT(play_button), "clicked", G_CALLBACK(play_cb), data);

    pause_button = gtk_button_new_from_icon_name("media-playback-pause", GTK_ICON_SIZE_SMALL_TOOLBAR);
    g_signal_connect(G_OBJECT(pause_button), "clicked", G_CALLBACK(pause_cb), data);

    stop_button = gtk_button_new_from_icon_name("media-playback-stop", GTK_ICON_SIZE_SMALL_TOOLBAR);
    g_signal_connect(G_OBJECT(stop_button), "clicked", G_CALLBACK(stop_cb), data);

    data->slider = gtk_scale_new_with_range(GTK_ORIENTATION_HORIZONTAL, 0, 100, 1);
    gtk_scale_set_draw_value(GTK_SCALE(data->slider), 0);
    data->slider_update_signal_id = g_signal_connect(G_OBJECT(data->slider), "value-changed", G_CALLBACK(slider_cb), data);

    controls = gtk_box_new(GTK_ORIENTATION_HORIZONTAL, 0);
    gtk_box_pack_start(GTK_BOX(controls), play_button, FALSE, FALSE, 2);
    gtk_box_pack_start(GTK_BOX(controls), pause_button, FALSE, FALSE, 2);
    gtk_box_pack_start(GTK_BOX(controls), stop_button, FALSE, FALSE, 2);
    gtk_box_pack_start(GTK_BOX(controls), data->slider, TRUE, TRUE, 2);

    main_hbox = gtk_box_new(GTK_ORIENTATION_HORIZONTAL, 0);
    gtk_box_pack_start(GTK_BOX(main_hbox), data->sink_widget, TRUE, TRUE, 0);

    main_box = gtk_box_new(GTK_ORIENTATION_VERTICAL, 0);
    gtk_box_pack_start(GTK_BOX(main_box), main_hbox, TRUE, TRUE, 0);
    gtk_box_pack_start(GTK_BOX(main_box), controls, FALSE, FALSE, 0);
    gtk_container_add(GTK_CONTAINER(main_window), main_box);
    gtk_window_set_default_size(GTK_WINDOW(main_window), 480, 800);

    gtk_widget_show_all(main_window);
}
/* This function is called periodically to refresh the GUI */
static gboolean refresh_ui(CustomData *data) {
    gint64 current = -1;

    /* We do not want to update anything unless we are in the PAUSED or PLAYING states */
    if (data->state < GST_STATE_PAUSED)
        return TRUE;

    /* If we didn't know it yet, query the stream duration */
    if (!GST_CLOCK_TIME_IS_VALID(data->duration)) {
        if (!gst_element_query_duration(data->pipeline, GST_FORMAT_TIME, &data->duration)) {
            g_printerr("Could not query current duration.\n");
        } else {
            /* Set the range of the slider to the clip duration, in SECONDS */
            gtk_range_set_range(GTK_RANGE(data->slider), 0, (gdouble)data->duration / GST_SECOND);
        }
    }

    if (gst_element_query_position(data->pipeline, GST_FORMAT_TIME, &current)) {
        /* Block the "value-changed" signal, so the slider_cb function is not called
         * (which would trigger a seek the user has not requested) */
        g_signal_handler_block(data->slider, data->slider_update_signal_id);
        /* Set the position of the slider to the current pipeline position, in SECONDS */
        gtk_range_set_value(GTK_RANGE(data->slider), (gdouble)current / GST_SECOND);
        /* Re-enable the signal */
        g_signal_handler_unblock(data->slider, data->slider_update_signal_id);
    }
    return TRUE;
}
/* This function is called when new metadata is discovered in the stream */
static void tags_cb(GstElement *pipeline, gint stream, CustomData *data) {
    /* We are possibly in a GStreamer working thread, so we notify the main
     * thread of this event through a message in the bus */
    gst_element_post_message(pipeline,
        gst_message_new_application(GST_OBJECT(pipeline),
            gst_structure_new_empty("tags-changed")));
}

/* This function is called when an error message is posted on the bus */
static void error_cb(GstBus *bus, GstMessage *msg, CustomData *data) {
    GError *err;
    gchar *debug_info;

    /* Print error details on the screen */
    gst_message_parse_error(msg, &err, &debug_info);
    g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME(msg->src), err->message);
    g_printerr("Debugging information: %s\n", debug_info ? debug_info : "none");
    g_clear_error(&err);
    g_free(debug_info);

    /* Set the pipeline to READY (which stops playback) */
    gst_element_set_state(data->pipeline, GST_STATE_READY);
}

/* This function is called when an End-Of-Stream message is posted on the bus.
 * We just remember it so the PLAY button can restart playback */
static void eos_cb(GstBus *bus, GstMessage *msg, CustomData *data) {
    g_print("End-Of-Stream reached.\n");
    play_flag = 1;
    // gst_element_set_state(data->pipeline, GST_STATE_READY);
}

/* This function is called when the pipeline changes states. We use it to
 * keep track of the current state. */
static void state_changed_cb(GstBus *bus, GstMessage *msg, CustomData *data) {
    GstState old_state, new_state, pending_state;
    gst_message_parse_state_changed(msg, &old_state, &new_state, &pending_state);
    if (GST_MESSAGE_SRC(msg) == GST_OBJECT(data->pipeline)) {
        data->state = new_state;
        g_print("State set to %s\n", gst_element_state_get_name(new_state));
        if (old_state == GST_STATE_READY && new_state == GST_STATE_PAUSED) {
            /* For extra responsiveness, we refresh the GUI as soon as we reach the PAUSED state */
            refresh_ui(data);
        }
    }
}
int main(int argc, char *argv[]) {
    CustomData data;
    GstBus *bus;
    GstElement *gtkglsink;

    /* Initialize GTK */
    gtk_init(&argc, &argv);

    /* Initialize GStreamer */
    gst_init(&argc, &argv);

    /* Initialize our data structure */
    memset(&data, 0, sizeof(data));
    data.duration = GST_CLOCK_TIME_NONE;

    /* Create the elements */
    data.pipeline = gst_pipeline_new("video-audio-player");
    filesrc = gst_element_factory_make("filesrc", "file-source");
    decodebin = gst_element_factory_make("decodebin", "decoder");
    video_queue = gst_element_factory_make("queue", "video-queue");
    v4l2convert = gst_element_factory_make("videoconvert", "video-convert");
    videosink = gst_element_factory_make("glsinkbin", "glsinkbin"); /* was autovideosink */
    audio_queue = gst_element_factory_make("queue", "audio-queue");
    audioconvert = gst_element_factory_make("audioconvert", "audio-convert");
    audioresample = gst_element_factory_make("audioresample", "audio-resample");
    audiosink = gst_element_factory_make("autoaudiosink", "audio-output");
    gtkglsink = gst_element_factory_make("gtkglsink", "gtkglsink");

    /* Here we create the GTK sink element which provides a GTK widget where
     * GStreamer will render the video, so we can add it to our UI.
     * Try the OpenGL version of the video sink first, and fall back if that fails */
    if (gtkglsink != NULL && videosink != NULL) {
        g_print("Successfully created gtkglsink.\n");
        g_object_set(videosink, "sink", gtkglsink, NULL);
        /* gtkglsink creates the GTK widget for us. It is accessible through a property,
         * so we get it here and use it later to add it to our GUI. */
        g_object_get(gtkglsink, "widget", &data.sink_widget, NULL);
    } else {
        g_printerr("Could not create gtkglsink, falling back to gtksink.\n");
        videosink = gst_element_factory_make("gtksink", "gtksink");
        /* gtksink can also be missing, so only query its widget if it was created */
        if (videosink != NULL)
            g_object_get(videosink, "widget", &data.sink_widget, NULL);
    }

    if (!data.pipeline || !filesrc || !decodebin || !video_queue || !v4l2convert || !videosink ||
        !audio_queue || !audioconvert || !audioresample || !audiosink) {
        g_printerr("Not all elements could be created.\n");
        return -1;
    }

    /* Set the file to play */
    g_object_set(G_OBJECT(filesrc), "location", "video.mp4", NULL);

    /* Build the pipeline. We add all elements into the pipeline */
    gst_bin_add_many(GST_BIN(data.pipeline), filesrc, decodebin, video_queue, v4l2convert, videosink,
                     audio_queue, audioconvert, audioresample, audiosink, NULL);

    /* Link the file source to the decoder */
    if (!gst_element_link(filesrc, decodebin)) {
        g_printerr("Failed to link filesrc and decodebin.\n");
        gst_object_unref(data.pipeline);
        return -1;
    }

    /* Connect to the pad-added signal */
    g_signal_connect(decodebin, "pad-added", G_CALLBACK(pad_added_handler), &data);

    /* Link video elements */
    if (!gst_element_link_many(video_queue, v4l2convert, videosink, NULL)) {
        g_printerr("Video elements could not be linked.\n");
        gst_object_unref(data.pipeline);
        return -1;
    }

    /* Link audio elements */
    if (!gst_element_link_many(audio_queue, audioconvert, audioresample, audiosink, NULL)) {
        g_printerr("Audio elements could not be linked.\n");
        gst_object_unref(data.pipeline);
        return -1;
    }

    /* Create the GUI */
    create_ui(&data);

    /* Instruct the bus to emit signals for each received message, and connect to the interesting signals */
    bus = gst_element_get_bus(data.pipeline);
    gst_bus_add_signal_watch(bus);
    g_signal_connect(bus, "message::error", G_CALLBACK(error_cb), &data);
    g_signal_connect(bus, "message::eos", G_CALLBACK(eos_cb), &data);
    g_signal_connect(G_OBJECT(bus), "message::state-changed", (GCallback)state_changed_cb, &data);

    /* Start playing */
    gst_element_set_state(data.pipeline, GST_STATE_PLAYING);

    /* Register a function that GLib will call every second */
    g_timeout_add(1000, (GSourceFunc)refresh_ui, &data);

    /* Start the GTK+ main loop. We will not regain control until gtk_main_quit is called. */
    gtk_main();

    /* Free resources */
    gst_object_unref(bus);
    gst_element_set_state(data.pipeline, GST_STATE_NULL);
    gst_object_unref(data.pipeline);
    return 0;
}
This worked fine on my host system, but when running the same application on the iMX6DL I get the following output:
Could not create gtkglsink, falling back to gtksink.
(video:816): GLib-GObject-CRITICAL **: 05:55:36.716: g_object_get: assertion 'G_IS_OBJECT (object)' failed
Not all elements could be created.
So I checked whether the gtkglsink and gtksink plugins are available: they are not part of the GStreamer installation on the iMX6DL.
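For reference, this check can be reproduced on the target with gst-inspect-1.0 (assuming that tool is installed in the image):

# On the target: these fail with "No such element or plugin" if the gtk plugin is missing
gst-inspect-1.0 gtksink
gst-inspect-1.0 gtkglsink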
I then tried to add the plugins manually by appending the following to local.conf in Yocto:

IMAGE_INSTALL:append = " gstreamer1.0-plugins-bad gstreamer1.0-plugins-good"
Even so, these plugins do not end up installed on the target. Can you please guide me on how to enable these plugins on the iMX6DL using Yocto? Any alternative method for embedding GStreamer in a GTK window would also be highly appreciated.
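In case it helps narrow things down: my understanding (which may be wrong) is that gtksink and gtkglsink come from the gtk plugin inside gst-plugins-good, which Yocto only builds when the matching PACKAGECONFIG is enabled, and which is then shipped as a separate per-plugin package. If so, local.conf would need something like the lines below; the exact option and package names are an assumption on my part and should be checked against the gstreamer1.0-plugins-good recipe in the BSP layer:

# Assumed names -- please verify against the gstreamer1.0-plugins-good recipe
PACKAGECONFIG:append:pn-gstreamer1.0-plugins-good = " gtk"
IMAGE_INSTALL:append = " gstreamer1.0-plugins-good-gtk"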
Thank you in advance.
Thanks & Regards,
Teena .