How to program videomixer using the GStreamer C API

I am trying to simulate the following gstreamer pipeline using the C API:
gst-launch -e videomixer name=mix ! ffmpegcolorspace ! xvimagesink \
videotestsrc pattern=1 ! video/x-raw-yuv,width=100,height=100 ! videobox border-alpha=0 top=0 left=0 ! mix. \
videotestsrc pattern=0 ! video/x-raw-yuv,width=100,height=100 ! videobox border-alpha=0 top=0 left=-100 ! mix.
So far I have:
#include <gst/gst.h>
#include <glib.h>
static gboolean
bus_call (GstBus *bus,
GstMessage *msg,
gpointer data)
{
GMainLoop *loop = (GMainLoop *) data;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_EOS:
g_print ("End of stream\n");
g_main_loop_quit (loop);
break;
case GST_MESSAGE_ERROR: {
gchar *debug;
GError *error;
gst_message_parse_error (msg, &error, &debug);
g_free (debug);
g_printerr ("Error: %s\n", error->message);
g_error_free (error);
g_main_loop_quit (loop);
break;
}
default:
break;
}
return TRUE;
}
int
main (int argc,
char *argv[])
{
GMainLoop *loop;
GstElement *pipeline;
GstElement *source1,*source2;
GstElement *scale,*filter;
GstElement *videobox1,*videobox2; //just one.
GstElement *mixer,*clrspace,*sink;
GstCaps *filtercaps;
GstBus *bus;
/* Initialisation */
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
/* Check input arguments */
/*if (argc != 2) {
g_printerr ("Usage: %s <Ogg/Vorbis filename>\n", argv[0]);
return -1;
}*/
//gst-launch videotestsrc pattern=snow ! ximagesink
/* Create gstreamer elements */
pipeline = gst_pipeline_new ("player");
source1 = gst_element_factory_make ("videotestsrc", "source1");
source2 = gst_element_factory_make ("videotestsrc", "source2");
// source2 = gst_element_factory_make ("uridecodebin", "file-source2");
scale = gst_element_factory_make ("videoscale", "scale");
filter = gst_element_factory_make("capsfilter","filter");
videobox1 = gst_element_factory_make ("videobox", "videobox1");
videobox2 = gst_element_factory_make ("videobox", "videobox2");
mixer = gst_element_factory_make ("videomixer", "mixer");
clrspace = gst_element_factory_make ("ffmpegcolorspace", "clrspace");
// demuxer = gst_element_factory_make ("oggdemux", "ogg-demuxer");
// decoder = gst_element_factory_make ("vorbisdec", "vorbis-decoder");
// conv = gst_element_factory_make ("audioconvert", "converter");
sink = gst_element_factory_make ("xvimagesink", "sink");
/*if (!pipeline || !source || !demuxer || !decoder || !conv || !sink) {
g_printerr ("One element could not be created. Exiting.\n");
return -1;
}*/
if (!pipeline || !source1 || !source2 || !sink) {
g_printerr ("One element could not be created. Exiting.\n");
return -1;
}
filtercaps = gst_caps_new_simple ("video/x-raw-yuv",
"width", G_TYPE_INT, 200,
"height", G_TYPE_INT, 100,
NULL);
g_object_set (G_OBJECT (filter), "caps", filtercaps, NULL);
//gst_caps_unref (filtercaps);
g_object_set(videobox1,"border-alpha",0,"top",0,"left",0,NULL);
g_object_set(videobox2,"border-alpha",0,"top",0,"left",-200,NULL);
/* Set up the pipeline */
/* we set the input filename to the source element */
g_object_set (G_OBJECT (source1), "pattern", 0, NULL);
g_object_set (G_OBJECT (source2), "pattern", 1, NULL);
/* we add a message handler */
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
/* we add all elements into the pipeline */
gst_bin_add_many (GST_BIN (pipeline),
source1,filter,videobox1,mixer,clrspace, sink, source2,videobox2, NULL);
/* we link the elements together */
//gst_element_link_many (source1, scale, filter, videobox1, mixer, clrspace, sink);
//gst_element_link_many (source2, scale, filter, videobox2, mixer, clrspace, sink);
gst_element_link_many (source1, filter, videobox1, mixer, clrspace, sink, NULL);
gst_element_link_many (source2, filter, videobox2, mixer, clrspace, sink, NULL);
/* Set the pipeline to "playing" state*/
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* Iterate */
g_print ("Running...\n");
g_main_loop_run (loop);
/* Out of the main loop, clean up nicely */
g_print ("Returned, stopping playback\n");
gst_element_set_state (pipeline, GST_STATE_NULL);
g_print ("Deleting pipeline\n");
gst_object_unref (GST_OBJECT (pipeline));
return 0;
}
I have also set debugging on: export GST_DEBUG=3
When I run my program I get the following error:
Running...
0:00:00.178663884 4797 0x8937020 WARN basesrc gstbasesrc.c:2582:gst_base_src_loop:<source2> error: Internal data flow error.
0:00:00.178766444 4797 0x8937020 WARN basesrc gstbasesrc.c:2582:gst_base_src_loop:<source2> error: streaming task paused, reason not-linked (-1)
Error: Internal data flow error.
Returned, stopping playback
0:00:00.202571746 4797 0x893ae00 WARN basetransform gstbasetransform.c:1627:gst_base_transform_prepare_output_buffer:<clrspace> pad-alloc failed: wrong-state
0:00:00.202645907 4797 0x893ae00 WARN basetransform gstbasetransform.c:2335:gst_base_transform_handle_buffer:<clrspace> could not get buffer from pool: wrong-state
Deleting pipeline
Why is it complaining about source2 not-linked?

A little late, but may be helpful:
If you look at the documentation for the videomixer element, you'll see that videomixer's sink pads are request pads. You need to create these pads before linking them.
/* Manually link the mixer, which has "Request" pads.
   (The template is "sink_%u" on GStreamer 1.0's videomixer; on 0.10,
   as used in the question, it is "sink_%d".) */
mixer_sink_pad_template = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (mixer), "sink_%u");
mixer_sink_pad = gst_element_request_pad (mixer, mixer_sink_pad_template, NULL, NULL);
/* Link the element feeding the mixer (here videobox1) to the requested pad */
src_pad = gst_element_get_static_pad (videobox1, "src");
gst_pad_link (src_pad, mixer_sink_pad);
Request pads are created the same way for as many input streams as you want to mix.
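For completeness, here is a sketch of wiring both branches that way, reusing the element names from the question (GStreamer 0.10, where videomixer's request-pad template is "sink_%d"; on 1.0 it is "sink_%u"). Note that the question's code also routes both sources through the single capsfilter instance, which cannot work because pads link one-to-one, so this sketch adds a second capsfilter (filter2, new here) and replaces the two gst_element_link_many() calls:
/* Each branch needs its own capsfilter; pads link one-to-one */
GstElement *filter2 = gst_element_factory_make ("capsfilter", "filter2");
GstPad *mix_pad1, *mix_pad2, *box_src1, *box_src2;

g_object_set (filter2, "caps", filtercaps, NULL);
gst_bin_add (GST_BIN (pipeline), filter2);

/* Static links up to the mixer, and from the mixer to the sink */
gst_element_link_many (source1, filter, videobox1, NULL);
gst_element_link_many (source2, filter2, videobox2, NULL);
gst_element_link_many (mixer, clrspace, sink, NULL);

/* One request pad per input stream */
mix_pad1 = gst_element_get_request_pad (mixer, "sink_%d");
mix_pad2 = gst_element_get_request_pad (mixer, "sink_%d");
box_src1 = gst_element_get_static_pad (videobox1, "src");
box_src2 = gst_element_get_static_pad (videobox2, "src");
gst_pad_link (box_src1, mix_pad1);
gst_pad_link (box_src2, mix_pad2);
gst_object_unref (box_src1);
gst_object_unref (box_src2);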

gst_element_link_many (source1, filter, videobox1, mixer, clrspace, sink, NULL);
gst_element_link_many (source2, filter, videobox2, mixer, NULL);
Also look into the xpos, ypos and zorder properties on the videomixer pads; that way you can drop the videobox elements entirely and gain performance.
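For example, a one-line sketch (assuming a mixer sink pad obtained via a pad request, as in the previous answer; xpos, ypos and zorder are the documented videomixer pad properties):
/* Shift the second stream right by 100 px instead of using videobox */
g_object_set (mixer_sink_pad, "xpos", 100, "ypos", 0, "zorder", 1, NULL);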

Related

Elements could not be linked, which elements to link? (GStreamer C)

I've been studying GStreamer and I don't understand where I went wrong, but every pipeline I've tried turning into code gives me 'Elements could not be linked'. I'm running this code on Ubuntu 20.04 with an upstream 5.13.0-35 kernel (which I think is not a concern) and GCC 9. Here is the code:
#include <gst/gst.h>
#include <stdio.h>
/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
GstElement *pipeline;
GstElement *source;
GstCaps *caps;
GstElement *depay;
GstElement *parse;
GstElement *decode;
GstElement *convert;
GstElement *sink;
} CustomData;
int main (int argc, char *argv[])
{
CustomData data;
GstBus *bus;
GstMessage *msg;
GstStateChangeReturn ret;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements */
data.source = gst_element_factory_make ("udpsrc", "source");
data.caps = gst_caps_new_simple("application/x-rtp",
"media", G_TYPE_STRING, "video",
"clock-rate", G_TYPE_INT, 90000,
"encoding-name", G_TYPE_STRING, "H264",
"payload", G_TYPE_INT, 96,
NULL);
data.depay = gst_element_factory_make ("rtph264depay", "depay");
data.parse = gst_element_factory_make ("h264parse", "parse");
data.decode = gst_element_factory_make ("decodebin", "decode");
data.convert = gst_element_factory_make ("videoconvert", "convert");
data.sink = gst_element_factory_make ("autovideosink", "sink");
/* Create the empty pipeline */
data.pipeline = gst_pipeline_new ("test-pipeline");
if (!data.pipeline || !data.source || !data.depay || !data.parse || !data.decode || !data.convert || !data.sink) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.depay, data.parse, data.decode, data.convert, data.sink, NULL);
if (gst_element_link_many (data.source, data.depay, data.parse, data.decode, data.convert, data.sink, NULL) != TRUE) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (data.pipeline);
return -1;
}
/* Set the port and caps to play */
g_object_set (data.source, "port", 5000, NULL);
g_object_set (data.source, "caps", data.caps, NULL);
g_object_set (data.sink, "sync", FALSE, NULL);
/* Start playing */
ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (data.pipeline);
return -1;
}
/* Wait until error or EOS */
bus = gst_element_get_bus (data.pipeline);
msg =
gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n",
GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n",
debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
break;
case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n");
break;
default:
/* We should not reach here because we only asked for ERRORs and EOS */
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (msg);
}
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (data.pipeline, GST_STATE_NULL);
gst_object_unref (data.pipeline);
return 0;
}
The receiving pipeline is:
gst-launch-1.0 -v udpsrc port=5000 ! "application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264, payload=(int)96" ! rtph264depay ! h264parse ! decodebin ! videoconvert ! autovideosink sync=false
And the sender is:
gst-launch-1.0 -v filesrc location=test.mp4 ! qtdemux ! h264parse ! avdec_h264 ! x264enc ! rtph264pay ! udpsink host=$HOST port=5000
Thanks in advance.
You may try using gst_parse_launch(), which will negotiate caps just as gst-launch does:
const gchar *pipeline_str = "udpsrc port=5000 ! application/x-rtp,encoding-name=H264 ! rtph264depay ! h264parse ! decodebin ! videoconvert ! autovideosink sync=false";
GstElement *pipeline = gst_parse_launch (pipeline_str, NULL);
if (!pipeline) {
g_error ("Failed to create pipeline\n");
exit(-1);
}
...
Otherwise, you may have to add capsfilters.
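For reference, the reason the static link fails is that decodebin only creates its source pad once the stream type is known (a "sometimes" pad), so gst_element_link_many() cannot link decode to convert up front. If you prefer keeping the element-by-element setup, here is a sketch reusing the CustomData struct and element names from the question:
/* decodebin's source pad appears at runtime; link it in a callback */
static void
on_decode_pad_added (GstElement *decode, GstPad *pad, gpointer user_data)
{
  CustomData *data = (CustomData *) user_data;
  GstPad *sinkpad = gst_element_get_static_pad (data->convert, "sink");
  if (!gst_pad_is_linked (sinkpad))
    gst_pad_link (pad, sinkpad);
  gst_object_unref (sinkpad);
}

/* ... in main(), link only the static parts and connect the callback: */
gst_element_link_many (data.source, data.depay, data.parse, data.decode, NULL);
gst_element_link (data.convert, data.sink);
g_signal_connect (data.decode, "pad-added", G_CALLBACK (on_decode_pad_added), &data);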

Linking GStreamer playbin to a custom video sink: videoconvert and sink not linking

Trying to make a GStreamer application from the pipeline: gst-launch-1.0 playbin uri=rtsp:// video-sink="videoconvert ! video/x-raw,width=720, height=480 ! ximagesink"
I get an error linking the elements. Here is my code:
#include <gst/gst.h>
int main(int argc, char *argv[]) {
GstElement *source, *videosink, *pipeline, *videoconvert;
GstCaps *capsFilter;
GstBus *bus;
GstMessage *msg;
GstPad *pad;
gboolean link_ok;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create Elements */
pipeline = gst_pipeline_new("my-pipeline");
source = gst_element_factory_make ("playbin", "source");
videoconvert = gst_element_factory_make("videoconvert", "convert");
videosink = gst_element_factory_make("ximagesink", "autovideosink");
/* set property value */
g_object_set (source, "uri", "rtsp:<file location>", NULL);
if (!pipeline || !source || !videoconvert || !videosink)
{
g_printerr ("Not all elements could be created.\n");
return -1;
}
gst_bin_add_many (GST_BIN(pipeline), videoconvert, videosink, NULL);
capsFilter = gst_caps_new_simple("video/x-raw",
"width", G_TYPE_INT, 176,
"height", G_TYPE_INT, 144,
NULL);
link_ok = gst_element_link_filtered(videoconvert,videosink, capsFilter);
gst_caps_unref (capsFilter);
if (!link_ok) {
g_warning ("Failed to link element1 and element2!");
}
if (gst_element_link_many( videoconvert, videosink, NULL) != TRUE) {
g_print ("Failed to link some elements .....1 !\n");
gst_object_unref (pipeline);
return -1;
}
/* Start playing */
gst_element_set_state (source, GST_STATE_PLAYING);
/* Wait until error or EOS */
bus = gst_element_get_bus (source);
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
It should be noted that the playbin element builds a full pipeline from source to sink on its own, so the pipeline without any sink settings plays perfectly:
gst-launch-1.0 playbin uri=rtsp://127.0.0.1:8551/test
The correct pipeline that you want to create is next:
gst-launch-1.0 playbin uri=rtsp://127.0.0.1:8551/test video-sink="videoconvert ! video/x-raw,width=320,height=240 ! ximagesink"
In order to embed this in a GStreamer application, it is not necessary to link all the elements yourself. Instead, the necessary steps are to build a custom video output bin and set it as playbin's video-sink property. In other words, the way to go is to create a bin, link the necessary elements inside it, and then tell playbin to use it via its video-sink property. It is also necessary to create a GhostPad for the bin and point it at the sink pad of the first element within the bin.
This is the result:
#include <gst/gst.h>
int main(int argc, char *argv[]) {
GstElement *source, *videosink, *pipeline, *videoconvert, *customoutput;
GstCaps *capsFilter;
GstBus *bus;
GstMessage *msg;
GstPad *pad;
gboolean add_ok;
gboolean link_ok;
GstStateChangeReturn ret;
GMainLoop *loop;
/* Initialize GStreamer */
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
/* Create Elements */
pipeline = gst_pipeline_new("my-pipeline");
source = gst_element_factory_make ("playbin", "source");
videoconvert = gst_element_factory_make("videoconvert", "videoconvert");
capsFilter = gst_caps_new_simple("video/x-raw",
"width", G_TYPE_INT, 320,
"height", G_TYPE_INT, 240,
NULL);
videosink = gst_element_factory_make("ximagesink", "videosink");
customoutput = gst_bin_new("customoutput");
// It is possible to create the bin like this
// Ghost pads on the bin for unlinked source or sink pads within the bin can automatically be created
// customoutput = gst_parse_bin_from_description ("videoconvert ! video/x-raw,width=320 ! ximagesink", TRUE, NULL);
gst_bin_add_many (GST_BIN (customoutput), videoconvert, videosink, NULL);
link_ok = gst_element_link_filtered(videoconvert,videosink, capsFilter);
gst_caps_unref (capsFilter);
if (!link_ok) {
g_warning ("Failed to link element1 and element2!");
}
GstPad *sinkpad,*ghost_sinkpad;
sinkpad = gst_element_get_static_pad (videoconvert, "sink");
ghost_sinkpad = gst_ghost_pad_new ("sink", sinkpad);
gst_pad_set_active (ghost_sinkpad, TRUE);
gst_element_add_pad (customoutput, ghost_sinkpad);
/* set property value */
g_object_set (source, "video-sink", customoutput, NULL);
g_object_set (source, "uri", "rtsp://127.0.0.1:8551/test", NULL);
if (!pipeline || !source || !videoconvert || !capsFilter || !videosink || !customoutput)
{
g_printerr ("Not all elements could be created.\n");
return -1;
}
gst_bin_add_many (GST_BIN(pipeline), source,NULL);
/* Start playing */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (pipeline);
return -1;
}
g_print ("Running...\n");
g_main_loop_run (loop);
/* Alternative to the main loop: block on the bus until error or EOS */
//bus = gst_element_get_bus (pipeline);
//msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Free resources (bus was never taken above, so it must not be unreffed) */
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
g_main_loop_unref (loop);
return 0;
}
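As the commented-out lines above hint, the whole custom output bin (ghost pad included) can also be built in one call with gst_parse_bin_from_description(); a sketch of that variant, assuming the same caps and element chain:
GError *err = NULL;
/* TRUE asks GStreamer to ghost any unlinked pads in the bin automatically */
customoutput = gst_parse_bin_from_description (
    "videoconvert ! video/x-raw,width=320,height=240 ! ximagesink",
    TRUE, &err);
if (customoutput == NULL) {
  g_printerr ("Failed to build the video sink bin: %s\n",
      err != NULL ? err->message : "unknown error");
  g_clear_error (&err);
  return -1;
}
g_object_set (source, "video-sink", customoutput, NULL);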

Creating a pipeline to transmit voice

I have the following pipelines; one of them sends voice over a UDP port and the other receives it on the same port on the receiver side:
gst-launch-1.0 -v alsasrc ! audioconvert ! audio/x-raw,channels=2,depth=16,width=16,rate=44100 ! rtpL16pay ! udpsink host=127.0.0.1 port=5000 //sender
and
gst-launch-1.0 udpsrc port=5000 ! "application/x-rtp, media=(string)audio, clock-rate=(int)44100, encoding-name=(string)L16, channels=(int)2, payload=(int)96" ! rtpL16depay ! audioconvert ! alsasink //receiver
These pipelines work perfectly.
Now I am trying to write source code using the GStreamer SDK that does the same thing. This is how far I have come:
#include <gst/gst.h>
#include <string.h>
int main(int argc, char *argv[]) {
GstElement *pipeline, *source, *audiosink,*rtppay,*rtpdepay,*filter,*filter1,*conv,*conv1,*udpsink,*udpsrc,*receive_resample;
GstBus *bus;
GstMessage *msg;
GstCaps *filtercaps;
GstStateChangeReturn ret;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements */
source = gst_element_factory_make ("alsasrc", "source");
conv= gst_element_factory_make ("audioconvert", "conv");
conv1= gst_element_factory_make ("audioconvert", "conv1");
filter=gst_element_factory_make("capsfilter","filter");
rtppay=gst_element_factory_make("rtpL16pay","rtppay");
rtpdepay=gst_element_factory_make("rtpL16depay","rtpdepay");
udpsink=gst_element_factory_make("udpsink","udpsink");
audiosink = gst_element_factory_make ("autoaudiosink", "audiosink");
receive_resample = gst_element_factory_make("audioresample", NULL);
udpsrc=gst_element_factory_make("udpsrc",NULL);
filter1=gst_element_factory_make("capsfilter","filter");
g_object_set(udpsrc,"port",5000,NULL);
g_object_set (G_OBJECT (udpsrc), "caps", gst_caps_from_string("application/x-rtp,media=audio,payload=96,clock-rate=44100,encoding-name=L16,channels=2"), NULL);
/* Create the empty pipeline */
pipeline = gst_pipeline_new ("test-pipeline");
if (!pipeline || !source || !filter || !conv || !rtppay || !udpsink ) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
g_object_set(G_OBJECT(udpsink),"host","127.0.0.1",NULL);
g_object_set(G_OBJECT(udpsink),"port",5000,NULL);
filtercaps = gst_caps_new_simple ("audio/x-raw",
"channels", G_TYPE_INT, 2,
"width", G_TYPE_INT, 16,
"depth", G_TYPE_INT, 16,
"rate", G_TYPE_INT, 44100,
NULL);
g_object_set (G_OBJECT (filter), "caps", filtercaps, NULL);
gst_caps_unref (filtercaps);
filtercaps = gst_caps_new_simple ("application/x-rtp",
"media",G_TYPE_STRING,"audio",
"clock-rate",G_TYPE_INT,44100,
"encoding-name",G_TYPE_STRING,"L16",
"channels", G_TYPE_INT, 2,
"payload",G_TYPE_INT,96,
NULL);
g_object_set (G_OBJECT (filter1), "caps", filtercaps, NULL);
gst_caps_unref (filtercaps);
/* Build the pipeline */
gst_bin_add_many (GST_BIN (pipeline), source,filter,conv,rtppay,udpsink, NULL);
if (gst_element_link_many (source,filter,conv,rtppay,udpsink, NULL) != TRUE) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (pipeline);
return -1;
}
gst_bin_add_many (GST_BIN (pipeline),udpsrc,rtpdepay,conv1,receive_resample,audiosink,NULL);
if (gst_element_link_many (udpsrc,rtpdepay,conv1,receive_resample,audiosink,NULL) != TRUE) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (pipeline);
return -1;
}
/* Modify the source's properties */
// g_object_set (source, "pattern", 0, NULL);
/* Start playing */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (pipeline);
return -1;
}
/* Wait until error or EOS */
bus = gst_element_get_bus (pipeline);
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
break;
case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n");
break;
default:
/* We should not reach here because we only asked for ERRORs and EOS */
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (msg);
}
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
return 0;
}
But somehow I don't receive any audio on the receiver, and I don't get errors of any kind. Any ideas why this is happening?

Joining the pictures in the same window

I want to display two pictures in the same window. This code displays them in two different windows.
Is there a solution for joining the two pictures in the same window?
#include <gst/gst.h>
#include <glib.h>
static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
GMainLoop *loop = (GMainLoop *) data;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_EOS:
g_print ("End of stream\n");
g_main_loop_quit (loop);
break;
case GST_MESSAGE_ERROR: {
gchar *debug;
GError *error;
gst_message_parse_error (msg, &error, &debug);
g_free (debug);
g_printerr ("Error: %s\n", error->message);
g_error_free (error);
g_main_loop_quit (loop);
break;
}
default:
break;
}
return TRUE;
}
static void on_pad_added (GstElement *element, GstPad *pad, gpointer data)
{
GstPad *sinkpad;
GstElement *decoder = (GstElement *) data;
/* We can now link this pad with the vorbis-decoder sink pad */
g_print ("Dynamic pad created, linking demuxer/decoder\n");
sinkpad = gst_element_get_static_pad (decoder, "sink");
gst_pad_link (pad, sinkpad);
gst_object_unref (sinkpad);
}
int main (int argc, char *argv[]) {
GMainLoop *loop;
GstElement *pipeline,*freeze,*clrspace, *source1, *source2, *videobox1,*videobox2, *mixer,*sink,*queuevideo;
GstBus *bus;
loop = g_main_loop_new (NULL, FALSE);
gst_init (&argc, &argv);
/* Create gstreamer elements */
pipeline = gst_pipeline_new ("player");
source1 = gst_element_factory_make ("playbin2", "dec1");
source2 = gst_element_factory_make ("playbin2", "dec2");
freeze = gst_element_factory_make ("imagefreeze", "fr");
videobox1 = gst_element_factory_make ("videobox", "videobox1");
videobox2 = gst_element_factory_make ("videobox", "videobox2");
clrspace = gst_element_factory_make ("ffmpegcolorspace", "clrspace");
mixer = gst_element_factory_make ("videomixer", "mixer");
queuevideo = gst_element_factory_make ("queue", "queue-video");
sink = gst_element_factory_make ("autovideosink", "sink");
if (!pipeline || !source1 || !source2 || !sink || !mixer ||!freeze || !clrspace || !queuevideo ) {
g_printerr ("One element could not be created. Exiting.\n");
exit(1);
}
g_object_set (source1, "uri", "http://www.logotheque.fr/6396-2/logo+RMC+INFO.jpg", NULL);
g_object_set (source2, "uri", "http://www.logotheque.fr/6396-2/logo+RMC+INFO.jpg", NULL);
g_object_set(videobox1,"border-alpha",0,"top",0,"left",0,NULL);
g_object_set(videobox2,"border-alpha",0,"top",0,"left",-200,NULL);
/* we add a message handler */
bus = gst_pipeline_get_bus(GST_PIPELINE (pipeline));
gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
/* we add all elements into the pipeline */
gst_bin_add_many (GST_BIN(pipeline), source1,mixer, clrspace, freeze,videobox1, sink, NULL);
/* we link the elements together */
gst_element_link_many (source2, mixer, clrspace, freeze,videobox2,sink, NULL);
//gst_element_link_many(source[1], mixer, NULL);
g_signal_connect (source1, "pad-added", G_CALLBACK (on_pad_added), queuevideo);
g_signal_connect (source2, "pad-added", G_CALLBACK (on_pad_added), queuevideo);
/* Set the pipeline to "playing" state*/
gst_element_set_state(pipeline, GST_STATE_PLAYING);
/* Iterate */
g_print ("Running...\n");
g_main_loop_run (loop);
/* Out of the main loop, clean up nicely */
g_print ("Returned, stopping playback\n");
gst_element_set_state (pipeline, GST_STATE_NULL);
g_print ("Deleting pipeline\n");
gst_object_unref (GST_OBJECT(pipeline));
}
Here is how to combine two images with GStreamer from the command line:
gst-launch-1.0 uridecodebin uri=file:///home/meh/Pictures/questions.jpg ! videoscale ! video/x-raw, width=320, height=240 ! imagefreeze ! videomixer name=m sink_1::xpos=320 ! autovideosink uridecodebin uri=file:///home/meh/Pictures/testsrc.png ! videoscale ! video/x-raw, width=320, height=240 ! imagefreeze ! m.
Explanation:
We create two decoders for the images, resize them with videoscale to an arbitrary size (here 320x240), freeze them, and send them to videomixer. videomixer has the x position of its sink_1 pad set to 320, which offsets the first image so that the second one is visible as well.
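Translated into C, a minimal sketch of the same composition (GStreamer 1.0; the file URIs and the 320x240 size are taken from the command line above, and error handling is trimmed, so treat this as a starting point rather than a drop-in program):
#include <gst/gst.h>

static void
on_pad_added (GstElement *dec, GstPad *pad, gpointer data)
{
  /* uridecodebin's source pad only appears once the image is decoded */
  GstPad *sinkpad = gst_element_get_static_pad (GST_ELEMENT (data), "sink");
  if (!gst_pad_is_linked (sinkpad))
    gst_pad_link (pad, sinkpad);
  gst_object_unref (sinkpad);
}

/* Builds uridecodebin ! videoscale ! video/x-raw,width=320,height=240 !
 * imagefreeze and links it to a freshly requested videomixer sink pad,
 * which is returned so the caller can set xpos/ypos on it. */
static GstPad *
add_branch (GstBin *pipe, GstElement *mixer, const gchar *uri)
{
  GstElement *dec = gst_element_factory_make ("uridecodebin", NULL);
  GstElement *scale = gst_element_factory_make ("videoscale", NULL);
  GstElement *capsf = gst_element_factory_make ("capsfilter", NULL);
  GstElement *freeze = gst_element_factory_make ("imagefreeze", NULL);
  GstCaps *caps = gst_caps_new_simple ("video/x-raw",
      "width", G_TYPE_INT, 320, "height", G_TYPE_INT, 240, NULL);
  GstPad *mixpad, *srcpad;

  g_object_set (dec, "uri", uri, NULL);
  g_object_set (capsf, "caps", caps, NULL);
  gst_caps_unref (caps);

  gst_bin_add_many (pipe, dec, scale, capsf, freeze, NULL);
  gst_element_link_many (scale, capsf, freeze, NULL);
  g_signal_connect (dec, "pad-added", G_CALLBACK (on_pad_added), scale);

  /* gst_element_request_pad_simple() in GStreamer >= 1.20 */
  mixpad = gst_element_get_request_pad (mixer, "sink_%u");
  srcpad = gst_element_get_static_pad (freeze, "src");
  gst_pad_link (srcpad, mixpad);
  gst_object_unref (srcpad);
  return mixpad;
}

int
main (int argc, char *argv[])
{
  GstElement *pipeline, *mixer, *sink;
  GstPad *pad0, *pad1;
  GMainLoop *loop;

  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  pipeline = gst_pipeline_new ("player");
  mixer = gst_element_factory_make ("videomixer", "m");
  sink = gst_element_factory_make ("autovideosink", "sink");
  gst_bin_add_many (GST_BIN (pipeline), mixer, sink, NULL);
  gst_element_link (mixer, sink);

  pad0 = add_branch (GST_BIN (pipeline), mixer,
      "file:///home/meh/Pictures/questions.jpg");
  pad1 = add_branch (GST_BIN (pipeline), mixer,
      "file:///home/meh/Pictures/testsrc.png");
  g_object_set (pad1, "xpos", 320, NULL); /* same effect as sink_1::xpos=320 */
  gst_object_unref (pad0);
  gst_object_unref (pad1);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  g_main_loop_run (loop); /* runs until interrupted; add a bus watch for EOS/errors */

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  g_main_loop_unref (loop);
  return 0;
}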

GStreamer caps filtering issue when converting from command line to C code

I am having issues converting my working GStreamer pipeline from a command line version to C code. From the command line the following command will successfully play my headerless mu-law audio file:
gst-launch filesrc location=test.ulaw ! audio/x-mulaw, rate=8000, channels=1 ! mulawdec ! audioconvert ! audioresample ! autoaudiosink
However, my issues arise when trying to add the "audio/x-mulaw, rate=8000, channels=1" part to my C program. The program started off playing wav files (using wavparse in place of mulawdec), so I know my base C code works; it must just be that I am misinterpreting how the caps need to be added to make it work with mu-law files.
I am creating the caps, then using the gst_element_link_filtered to use this:
GstCaps *gstMuLawCaps = gst_caps_from_string("audio/x-mulaw, rate=8000, channels=1");
gst_element_link_filtered(gstFileSource, gstMuLawDecoder, gstMuLawCaps);
But this is not working, and running the program produces the following output:
>gst-mulaw.exe test.ulaw
Playing.
Error: Internal data flow error.
Playback Finished.
I would be grateful if anyone is able to help shed some light on what I am doing wrong. The full code is given below:
#include <gst/gst.h>
#include <glib.h>
static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data) {
GMainLoop *loop = (GMainLoop *) data;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_EOS:
g_print ("End of stream\n");
g_main_loop_quit (loop);
break;
case GST_MESSAGE_ERROR: {
gchar *debug;
GError *error;
gst_message_parse_error (msg, &error, &debug);
g_free (debug);
g_printerr ("Error: %s\n", error->message);
g_error_free (error);
g_main_loop_quit (loop);
break;
}
default:
break;
}
return TRUE;
}
static void on_pad_added (GstElement *gstSourceElement, GstPad *gstSourcePad, gpointer data) {
g_print("Linking dynamic pad.\n");
GstPad *gstSinkPad;
GstElement *gstSinkElement = (GstElement *) data;
gstSinkPad = gst_element_get_static_pad (gstSinkElement, "sink");
gst_pad_link (gstSourcePad, gstSinkPad);
gst_object_unref (gstSinkPad);
}
int main (int argc, char *argv[]) {
GMainLoop *loop;
GstElement *gstPipeline, *gstFileSource, *gstMuLawDecoder, *gstAudioConvert, *gstAudioResample, *gstAudioSink;
GstBus *bus;
// GStreamer initialisation.
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
// Check input arguments.
if (argc != 2) {
g_printerr ("Usage: %s <mu-law File>\n", argv[0]);
return -1;
}
// Create the GStreamer elements.
gstPipeline = gst_pipeline_new ("player");
gstFileSource = gst_element_factory_make ("filesrc", "filesource");
gstMuLawDecoder = gst_element_factory_make ("mulawdec", "mulawdecoder");
gstAudioConvert = gst_element_factory_make ("audioconvert", "audioconverter");
gstAudioResample = gst_element_factory_make ("audioresample", "audioresampler");
gstAudioSink = gst_element_factory_make ("autoaudiosink", "audiosink");
if (!gstPipeline || !gstFileSource || !gstMuLawDecoder || !gstAudioConvert || !gstAudioResample || !gstAudioSink) {
g_printerr ("An element could not be created. Exiting.\n");
return -1;
}
// Set the filesrc location to the file passed on the command line.
g_object_set (G_OBJECT (gstFileSource), "location", argv[1], NULL);
// Setup the GStreamer bus.
bus = gst_pipeline_get_bus (GST_PIPELINE (gstPipeline));
gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
// Add the objects to the pipeline.
gst_bin_add_many (GST_BIN (gstPipeline), gstFileSource, gstMuLawDecoder, gstAudioConvert, gstAudioResample, gstAudioSink, NULL);
// Link the elements together.
GstCaps *gstMuLawCaps = gst_caps_from_string("audio/x-mulaw, rate=8000, channels=1");
gst_element_link_filtered(gstFileSource, gstMuLawDecoder, gstMuLawCaps);
gst_caps_unref(gstMuLawCaps);
gst_element_link_many (gstAudioConvert, gstAudioResample, gstAudioSink, NULL);
g_signal_connect (gstMuLawDecoder, "pad-added", G_CALLBACK (on_pad_added), gstAudioConvert);
// Set the pipeline to state playing, and run the main loop.
g_print ("Playing.\n");
gst_element_set_state (gstPipeline, GST_STATE_PLAYING);
g_main_loop_run (loop);
// Finished playback, cleanup.
g_print ("Playback Finished.\n");
gst_element_set_state (gstPipeline, GST_STATE_NULL);
gst_object_unref (GST_OBJECT (gstPipeline));
return 0;
}
Thanks.
Try changing
// Link the elements together.
GstCaps *gstMuLawCaps = gst_caps_from_string("audio/x-mulaw, rate=8000, channels=1");
gst_element_link_filtered(gstFileSource, gstMuLawDecoder, gstMuLawCaps);
gst_caps_unref(gstMuLawCaps);
gst_element_link_many (gstAudioConvert, gstAudioResample, gstAudioSink, NULL);
g_signal_connect (gstMuLawDecoder, "pad-added", G_CALLBACK (on_pad_added), gstAudioConvert);
to
// Link the elements together.
GstCaps *gstMuLawCaps = gst_caps_from_string("audio/x-mulaw, rate=8000, channels=1");
gst_element_link_filtered(gstFileSource, gstMuLawDecoder, gstMuLawCaps);
gst_caps_unref(gstMuLawCaps);
gst_element_link_many (gstMuLawDecoder, gstAudioConvert, gstAudioResample, gstAudioSink, NULL);
pad-added handling is needed for elements with sometimes pads (e.g. demuxers). You can remove the on_pad_added callback function (it was never called anyway, right?). The pad type can be seen in the "gst-inspect mulawdec" output.
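For contrast, a minimal sketch of a case where pad-added really is needed, e.g. an oggdemux (the demuxer and decoder variables here are hypothetical):
/* Demuxer source pads are "sometimes" pads: they do not exist until
 * the stream is parsed, so the decoder must be linked in the callback. */
static void
on_demux_pad_added (GstElement *demux, GstPad *newpad, gpointer data)
{
  GstPad *sinkpad = gst_element_get_static_pad (GST_ELEMENT (data), "sink");
  if (!gst_pad_is_linked (sinkpad))
    gst_pad_link (newpad, sinkpad);
  gst_object_unref (sinkpad);
}

/* ... in main(): */
g_signal_connect (demuxer, "pad-added", G_CALLBACK (on_demux_pad_added), decoder);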
