Gstreamer x264enc invalid buffer size c - c

I've been trying to use this gstreamer c code(my system is running on ubuntu 20.04 with GStreamer 1.16.2 and gcc 9.4.0):
#include <gst/gst.h>
#include <glib.h>
#include <stdio.h>
// gst-launch-1.0 videotestsrc pattern=ball ! 'video/x-raw, format=(string)I420, width=(int)1920, height=(int)1080, framerate=(fraction)30/1' ! \
// ! queue ! nvvideoconvert ! nvv4l2h264enc bitrate=1000000 ! rtph264pay ! udpsink host=192.168.0.1 port=5000
int
main (int argc, char *argv[])
{
GstElement *pipeline, *source, *filter, *queue, *converter, *encoder, *payer, *sink;
GstBus *bus;
GstMessage *msg;
GstCaps *filtercaps;
GstStateChangeReturn ret;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements */
source = gst_element_factory_make ("filesrc", "source");
filter = gst_element_factory_make ("capsfilter","filter");
queue = gst_element_factory_make ("queue","queue");
converter = gst_element_factory_make ("videoconvert","converter");
encoder = gst_element_factory_make ("x264enc","encoder");
payer = gst_element_factory_make ("rtph264pay","payer");
sink = gst_element_factory_make ("udpsink", "sink");
/* Create the empty pipeline */
pipeline = gst_pipeline_new ("maxim-pipeline");
if (!pipeline || !source || !filter || !queue || !converter || !encoder || !payer || !sink) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Build the pipeline */
gst_bin_add_many (GST_BIN (pipeline), source, filter, queue, converter, encoder, payer, sink, NULL);
if (!gst_element_link_many (source,filter,queue,NULL)){
g_printerr ("Source->filter->queue problem\n");
gst_object_unref (pipeline);
return -1;
}
if (!gst_element_link_many (queue,converter,encoder,NULL)){
g_printerr ("Queue->converter->encoder problem\n");
gst_object_unref (pipeline);
return -1;
}
if (!gst_element_link_many(encoder,payer,sink,NULL)){
g_printerr ("Encoder->payer->sink problem\n");
gst_object_unref (pipeline);
return -1;
}
/* Modify the properties */
g_object_set (source, "location", "/home/thmsd/Videos/test.mkv", NULL);
g_object_set (encoder, "bitrate", 2000000, NULL);
g_object_set (sink, "host","192.168.0.1", NULL);
g_object_set (sink, "port",5000, NULL);
g_object_set (sink, "sync", "FALSE", NULL);
filtercaps = gst_caps_new_simple ("video/x-raw",
"format",G_TYPE_STRING,"I420",
"width", G_TYPE_INT, 1920,
"height", G_TYPE_INT, 1080,
"framerate",GST_TYPE_FRACTION,30,1,
NULL);
g_object_set (filter, "caps", filtercaps, NULL);
gst_caps_unref (filtercaps);
/* Start playing */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (pipeline);
return -1;
}
/* Wait until error or EOS */
bus = gst_element_get_bus (pipeline);
msg =
gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n",
GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n",
debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
break;
case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n");
break;
default:
/* We should not reach here because we only asked for ERRORs and EOS */
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (msg);
}
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
return 0;
}
But the following error happens which is actually referring to x264enc, and btw this code is edited, at first it was jetson compatible and had nvv4l2h264enc:
The test video that I'm trying to stream has the following properties:

First note that gstreamer booleans are not typed with capitals, so use true and false or just use 0 and 1.
x264enc may not be that fast on Jetson. You may try the following :
#include <gst/gst.h>
int main (gint argc, gchar * argv[])
{
gst_init (&argc, &argv);
GMainLoop *loop = g_main_loop_new (NULL, FALSE);
GError *error = NULL;
char* gst_pipeline_str = "filesrc location=/home/nvidia/Videos/bbb_sunflower_1080p_60fps_normal.mkv ! matroskademux ! parsebin ! nvv4l2decoder ! nvv4l2h264enc bitrate=20000000 insert-sps-pps=1 insert-vui=1 idrinterval=15 ! h264parse ! rtph264pay ! udpsink host=127.0.0.1 port=5004";
/* x264enc may be much slower :*/
/*
char* gst_pipeline_str = "filesrc location=/home/nvidia/Videos/bbb_sunflower_1080p_60fps_normal.mkv ! matroskademux ! parsebin ! nvv4l2decoder ! nvvidconv ! video/x-raw ! x264enc bitrate=20000 tune=zerolatency insert-vui=1 ! h264parse ! rtph264pay ! udpsink host=127.0.0.1 port=5004";
*/
/* Create the pipeline */
GstElement *pipeline = gst_parse_launch (gst_pipeline_str, &error);
if (error || !pipeline) {
g_error ("Failed to create pipeline\n");
exit(-1);
}
/* This will output changes in terminal, you may remove it later to make it quiet. */
g_signal_connect(pipeline, "deep-notify", G_CALLBACK(gst_object_default_deep_notify), NULL);
/* Ok, successfully created the pipeline, now start it */
gst_element_set_state (pipeline, GST_STATE_READY);
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* wait until it's up and running or failed */
if (gst_element_get_state (pipeline, NULL, NULL, -1) == GST_STATE_CHANGE_FAILURE) {
g_error ("Failed to go into PLAYING state");
exit(-2);
}
/* You may have to further manage bus for EOS... */
g_print ("Running ...\n");
g_main_loop_run (loop);
return 0;
}
This is what I've successfully tested on Jetson (AGX Xavier running L4T R32.6.1).
You would adapt to your file source and receiver address/port, and save as test_transcode_MKV_to_RTPH264.c then build with:
gcc -Wall -o test_transcode_MKV_to_RTPH264 test_transcode_MKV_to_RTPH264.c `pkg-config --cflags --libs gstreamer-1.0 gobject-2.0 glib-2.0`
and test streaming:
./test_transcode_MKV_to_RTPH264
Then if receiver has gstreamer installed, you should be able to display with something like:
gst-launch-1.0 udpsrc port=5004 ! application/x-rtp,encoding-name=H264 ! rtpjitterbuffer latency=300 ! rtph264depay ! decodebin ! autovideosink
For receiving RTPH264 with FFMPEG or VLC, you may have to create a SDP file.
EDIT: for non-NVIDIA case, you may try:
filesrc location=test_h265.mkv ! matroskademux ! h265parse ! avdec_h265 ! videoconvert ! x264enc bitrate=20000 tune=zerolatency insert-vui=1 key-int-max=30 ! h264parse ! rtph264pay ! udpsink host=127.0.0.1 port=5004

Related

elements could not be linked, which elements to link?(gstreamer c)

I've been studying about gstreamer and I don't understand where I got it wrong but every pipeline I tried turning code would give me 'Elements could not be linked' and I'm running this code on ubuntu 20.04 which has an upstream kernel version 5.13.0-35(which I think is not a concern) with gcc 9 and here is the code:
#include <gst/gst.h>
#include <stdio.h>
/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
GstElement *pipeline;
GstElement *source;
GstCaps *caps;
GstElement *depay;
GstElement *parse;
GstElement *decode;
GstElement *convert;
GstElement *sink;
} CustomData;
int main (int argc, char *argv[])
{
CustomData data;
GstBus *bus;
GstMessage *msg;
GstStateChangeReturn ret;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements */
data.source = gst_element_factory_make ("udpsrc", "source");
data.caps = gst_caps_new_simple("application/x-rtp",
"media", G_TYPE_STRING, "video",
"clock-rate", G_TYPE_INT, 90000,
"encoding-name", G_TYPE_STRING, "H264",
"payload", G_TYPE_INT, 96,
NULL);
data.depay = gst_element_factory_make ("rtph264depay", "depay");
data.parse = gst_element_factory_make ("h264parse", "parse");
data.decode = gst_element_factory_make ("decodebin", "decode");
data.convert = gst_element_factory_make ("videoconvert", "convert");
data.sink = gst_element_factory_make ("autovideosink", "sink");
/* Create the empty pipeline */
data.pipeline = gst_pipeline_new ("test-pipeline");
if (!data.pipeline || !data.source || !data.depay || !data.parse || !data.decode || !data.convert || !data.sink) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.depay, data.parse, data.decode, data.convert, data.sink, NULL);
if (gst_element_link_many (data.source, data.depay, data.parse, data.decode, data.convert, data.sink, NULL) != TRUE) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (data.pipeline);
return -1;
}
/* Set the port and caps to play */
g_object_set (data.source, "port", 5000, NULL);
g_object_set (data.source, "caps", data.caps, NULL);
g_object_set (data.sink, "sync", FALSE, NULL);
/* Start playing */
ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (data.pipeline);
return -1;
}
/* Wait until error or EOS */
bus = gst_element_get_bus (data.pipeline);
msg =
gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n",
GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n",
debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
break;
case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n");
break;
default:
/* We should not reach here because we only asked for ERRORs and EOS */
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (msg);
}
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (data.pipeline, GST_STATE_NULL);
gst_object_unref (data.pipeline);
return 0;
}
the recieving pipeline is:
gst-launch-1.0 -v udpsrc port=5000 ! "application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264, payload=(int)96" ! rtph264depay ! h264parse ! decodebin ! videoconvert ! autovideosink sync=false
And the sender is:
gst-launch-1.0 -v filesrc location=test.mp4 ! qtdemux ! h264parse ! avdec_h264 ! x264enc ! rtph264pay ! udpsink host=$HOST port=5000
Thanks in advance.
You may try using gst_parse_launch() that will negociate caps as gst-launch does:
const gchar *pipeline_str = "udpsrc port=5000 ! application/x-rtp,encoding-name=H264 ! rtph264depay ! h264parse ! decodebin ! videoconvert ! autovideosink sync=false";
GstElement *pipeline = gst_parse_launch (pipeline_str, NULL);
if (!pipeline) {
g_error ("Failed to create pipeline\n");
exit(-1);
}
...
Otherwise, you may have to add capsfilters.

creating a pipeline to transmit voice

i have the following pipelines that one of them sends voice signals on udp port and the other receives them on the same port number on the receiver side
gst-launch-1.0 -v alsasrc ! audioconvert
! audio/x-raw,channels=2,depth=16,width=16,rate=44100 !
rtpL16pay ! udpsink
host=127.0.0.1 port=5000 //sender
and
gst-launch-1.0 udpsrc port=5000 ! "application/x-rtp,
media=(string)audio, clock-rate=(int)44100,
encoding-name=(string)L16, channels=(int)2,
payload=(int)96" ! rtpL16depay ! audioconvert
! alsasink //receiver
These pipelines work perfectly.
now i am trying to write a source code using Gstreamer SDK that does the same thing. I have come so far:
#include <gst/gst.h>
#include <string.h>
int main(int argc, char *argv[]) {
GstElement *pipeline, *source, *audiosink,*rtppay,*rtpdepay,*filter,*filter1,*conv,*conv1,*udpsink,*udpsrc,*receive_resample;
GstBus *bus;
GstMessage *msg;
GstCaps *filtercaps;
GstStateChangeReturn ret;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements */
source = gst_element_factory_make ("alsasrc", "source");
conv= gst_element_factory_make ("audioconvert", "conv");
conv1= gst_element_factory_make ("audioconvert", "conv1");
filter=gst_element_factory_make("capsfilter","filter");
rtppay=gst_element_factory_make("rtpL16pay","rtppay");
rtpdepay=gst_element_factory_make("rtpL16depay","rtpdepay");
udpsink=gst_element_factory_make("udpsink","udpsink");
audiosink = gst_element_factory_make ("autoaudiosink", "audiosink");
receive_resample = gst_element_factory_make("audioresample", NULL);
udpsrc=gst_element_factory_make("udpsrc",NULL);
filter1=gst_element_factory_make("capsfilter","filter");
g_object_set(udpsrc,"port",5000,NULL);
g_object_set (G_OBJECT (udpsrc), "caps", gst_caps_from_string("application/x-rtp,media=audio,payload=96,clock-rate=44100,encoding-name=L16,channels=2"), NULL);
/* Create the empty pipeline */
pipeline = gst_pipeline_new ("test-pipeline");
if (!pipeline || !source || !filter || !conv || !rtppay || !udpsink ) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
g_object_set(G_OBJECT(udpsink),"host","127.0.0.1",NULL);
g_object_set(G_OBJECT(udpsink),"port",5000,NULL);
filtercaps = gst_caps_new_simple ("audio/x-raw",
"channels", G_TYPE_INT, 2,
"width", G_TYPE_INT, 16,
"depth", G_TYPE_INT, 16,
"rate", G_TYPE_INT, 44100,
NULL);
g_object_set (G_OBJECT (filter), "caps", filtercaps, NULL);
gst_caps_unref (filtercaps);
filtercaps = gst_caps_new_simple ("application/x-rtp",
"media",G_TYPE_STRING,"audio",
"clock-rate",G_TYPE_INT,44100,
"encoding-name",G_TYPE_STRING,"L16",
"channels", G_TYPE_INT, 2,
"payload",G_TYPE_INT,96,
NULL);
g_object_set (G_OBJECT (filter1), "caps", filtercaps, NULL);
gst_caps_unref (filtercaps);
/* Build the pipeline */
gst_bin_add_many (GST_BIN (pipeline), source,filter,conv,rtppay,udpsink, NULL);
if (gst_element_link_many (source,filter,conv,rtppay,udpsink, NULL) != TRUE) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (pipeline);
return -1;
}
gst_bin_add_many (GST_BIN (pipeline),udpsrc,rtpdepay,conv1,receive_resample,audiosink,NULL);
if (gst_element_link_many (udpsrc,rtpdepay,conv1,receive_resample,audiosink,NULL) != TRUE) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (pipeline);
return -1;
}
/* Modify the source's properties */
// g_object_set (source, "pattern", 0, NULL);
/* Start playing */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (pipeline);
return -1;
}
/* Wait until error or EOS */
bus = gst_element_get_bus (pipeline);
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
break;
case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n");
break;
default:
/* We should not reach here because we only asked for ERRORs and EOS */
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (msg);
}
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
return 0;
}
but somehow i dont receive any voice on the receiver. i dont get any errors of any kind. Any ideas why this is happening?

display image using gstreamer with c API

i try to do a gstreamer pipeline using c API to show image for this i use this gst-launch command
gst-launch filesrc location="pluto.jpg" ! jpegdec ! ffmpegcolorspace ! videobalance saturation=0 ! freeze ! ximagesink
when i try it it work fine but when i try to convert it to c code it doesn't work someone can help me please ?
#include <gst/gst.h>
int main(int argc, char *argv[]) {
GstElement *pipeline, *jpdec, *imgf, *cod, *source, *sink;
GstBus *bus;
GstMessage *msg;
GstStateChangeReturn ret;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements */
source = gst_element_factory_make ("filesrc", "source");
sink = gst_element_factory_make ("ximagesink", "sink");
jpdec = gst_element_factory_make ("jpegdec", "jdec");
imgf = gst_element_factory_make ("imagefreeze", "freeze");
cod = gst_element_factory_make ("ffmpegcolorspace", "ffmdec");
/* Create the empty pipeline */
pipeline = gst_pipeline_new ("test-pipeline");
if (!pipeline || !source || !sink || !jpdec || !imgf || !cod) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Build the pipeline */
gst_bin_add_many (GST_BIN (pipeline), source, jpdec, cod, imgf, sink, NULL);
if (gst_element_link (source, sink) != TRUE) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (pipeline);
return -1;
}
/* Modify the source's properties */
g_object_set (G_OBJECT (source), "location","pluto.jpg", NULL);
/* Start playing */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (pipeline);
return -1;
}
/* Wait until error or EOS */
bus = gst_element_get_bus (pipeline);
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
break;
case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n");
break;
default:
/* We should not reach here because we only asked for ERRORs and EOS */
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (msg);
}
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
return 0;
}
there is the c code that i use to play image
when i compile the code i don't have errors but when i run it i have this erreur :
(test:5355): GStreamer-CRITICAL **: gst_caps_get_structure: assertion `GST_IS_CAPS (caps)' failed
(test:5355): GStreamer-CRITICAL **: gst_structure_get_int: assertion `structure != NULL' failed
Error received from element sink: Failed to create output image buffer of 0x0 pixels
Debugging information: ximagesink.c(472): gst_ximagesink_ximage_new (): /GstPipeline:test-pipeline/GstXImageSink:sink:
could not get shared memory of 0 bytes
Your gst_element_link is wrong. Something like:
if (gst_element_link_many (source, jpdec, cod, imgf, sink, NULL) != TRUE)
should work.
Those errors are likely a bug in xvimagesink, but you are using it wrongly. Feel free to report a bug at bugzilla.gnome.org about these assertions in case they happen with 1.0.

Gstreamer1.0 : link a decodebin to videoconvert

I have the following pipeline which works fine:
gst-launch-1.0 -v filesrc location=/home/Videos/sample_h264.mov ! decodebin ! videoconvert ! autovideosink
I want to write a C program to do the same thing. So I translated the previous pipeline to the following code:
pipeline = gst_pipeline_new ("video_pipeline");
if (!pipeline) {
g_print("Failed to create the pipeline\n");
return -1;
}
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
watch_id = gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
source = gst_element_factory_make ("filesrc", "file-source");
decoder = gst_element_factory_make ("decodebin", "standard-decoder");
converter = gst_element_factory_make ("videoconvert", "converter");
sink = gst_element_factory_make ("autovideosink", "video-sink");
if (!source || !decoder || !converter || !sink) {
g_print("Failed to create one or more pipeline elements\n");
return -1;
}
g_object_set(G_OBJECT(source), "location", file_name, NULL);
gst_bin_add_many (GST_BIN (pipeline), source, decoder, converter, sink, NULL);
if (!gst_element_link_many (source, decoder, converter, sink, NULL)) {
g_print ("Failed to link some elements!\n");
return -1;
}
/* run */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
GstMessage *msg;
g_print ("Failed to start up pipeline!\n");
/* check if there is an error message with details on the bus */
msg = gst_bus_poll (bus, GST_MESSAGE_ERROR, 0);
if (msg) {
GError *err = NULL;
gst_message_parse_error (msg, &err, NULL);
g_print ("ERROR: %s\n", err->message);
g_error_free (err);
gst_message_unref (msg);
}
return -1;
}
But I get error when I try to connect the decoder to the converter. Why it works fine with the command line but not with C code?
Decodebin uses something called a "sometimes-pad", which is basically a pad that will show up when a certain condition is met, in decodebins case that is media being decoded. gst-launch will do this sort of thing automagically, but in code you need to register a callback, and then link the pad in that callback. See also: GStreamer: how to connect dynamic pads
As #HarvardGraff said, decodebin has no static src pads (see gst-inspect decodebin).
But you can use launch-strings in your app as well. That way GStreamer should handle all the linking):
GstError *error = NULL;
GstElement *pipeline = gst_parse_launch("filesrc name=src ! decodebin ! videoconvert ! autovideosink", &error);
if (!error) {
GstElement filesrc = gst_bin_get_by_name(GST_BIN(pipeline), "src");
g_object_set(filesrc, "location", "/home/Videos/sample_h264.mov", NULL);
}

How to program videomixer using Gstreamer C API

I am trying to simulate the following gstreamer pipeline using the C API:
gst-launch -e videomixer name=mix ! ffmpegcolorspace ! xvimagesink \
videotestsrc pattern=1 ! video/x-raw-yuv,width=100,height=100 ! videobox border-alpha=0 top=0 left=0 ! mix. \
videotestsrc pattern=0 ! video/x-raw-yuv,width=100,height=100 ! videobox border-alpha=0 top=0 left=-100 ! mix.
So far I have:
#include <gst/gst.h>
#include <glib.h>
static gboolean
bus_call (GstBus *bus,
GstMessage *msg,
gpointer data)
{
GMainLoop *loop = (GMainLoop *) data;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_EOS:
g_print ("End of stream\n");
g_main_loop_quit (loop);
break;
case GST_MESSAGE_ERROR: {
gchar *debug;
GError *error;
gst_message_parse_error (msg, &error, &debug);
g_free (debug);
g_printerr ("Error: %s\n", error->message);
g_error_free (error);
g_main_loop_quit (loop);
break;
}
default:
break;
}
return TRUE;
}
int
main (int argc,
char *argv[])
{
GMainLoop *loop;
GstElement *pipeline;
GstElement *source1,*source2;
GstElement *scale,*filter;
GstElement *videobox1,*videobox2; //just one.
GstElement *mixer,*clrspace,*sink;
GstCaps *filtercaps;
GstBus *bus;
/* Initialisation */
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
/* Check input arguments */
/*if (argc != 2) {
g_printerr ("Usage: %s <Ogg/Vorbis filename>\n", argv[0]);
return -1;
}*/
//gst-launch videotestsrc pattern=snow ! ximagesink
/* Create gstreamer elements */
pipeline = gst_pipeline_new ("player");
source1 = gst_element_factory_make ("videotestsrc", "source1");
source2 = gst_element_factory_make ("videotestsrc", "source2");
// source2 = gst_element_factory_make ("uridecodebin", "file-source2");
scale = gst_element_factory_make ("videoscale", "scale");
filter = gst_element_factory_make("capsfilter","filter");
videobox1 = gst_element_factory_make ("videobox", "videobox1");
videobox2 = gst_element_factory_make ("videobox", "videobox2");
mixer = gst_element_factory_make ("videomixer", "mixer");
clrspace = gst_element_factory_make ("ffmpegcolorspace", "clrspace");
// demuxer = gst_element_factory_make ("oggdemux", "ogg-demuxer");
// decoder = gst_element_factory_make ("vorbisdec", "vorbis-decoder");
// conv = gst_element_factory_make ("audioconvert", "converter");
sink = gst_element_factory_make ("xvimagesink", "sink");
/*if (!pipeline || !source || !demuxer || !decoder || !conv || !sink) {
g_printerr ("One element could not be created. Exiting.\n");
return -1;
}*/
if (!pipeline || !source1 || !source2 || !sink) {
g_printerr ("One element could not be created. Exiting.\n");
return -1;
}
filtercaps = gst_caps_new_simple ("video/x-raw-yuv",
"width", G_TYPE_INT, 200,
"height", G_TYPE_INT, 100,
NULL);
g_object_set (G_OBJECT (filter), "caps", filtercaps, NULL);
//gst_caps_unref (filtercaps);
g_object_set(videobox1,"border-alpha",0,"top",0,"left",0,NULL);
g_object_set(videobox2,"border-alpha",0,"top",0,"left",-200,NULL);
/* Set up the pipeline */
/* we set the input filename to the source element */
g_object_set (G_OBJECT (source1), "pattern", 0, NULL);
g_object_set (G_OBJECT (source2), "pattern", 1, NULL);
/* we add a message handler */
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
/* we add all elements into the pipeline */
gst_bin_add_many (GST_BIN (pipeline),
source1,filter,videobox1,mixer,clrspace, sink, source2,videobox2, NULL);
/* we link the elements together */
//gst_element_link_many (source1, scale, filter, videobox1, mixer, clrspace, sink);
//gst_element_link_many (source2, scale, filter, videobox2, mixer, clrspace, sink);
gst_element_link_many (source1, filter, videobox1, mixer, clrspace, sink);
gst_element_link_many (source2, filter, videobox2, mixer, clrspace, sink);
/* Set the pipeline to "playing" state*/
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* Iterate */
g_print ("Running...\n");
g_main_loop_run (loop);
/* Out of the main loop, clean up nicely */
g_print ("Returned, stopping playback\n");
gst_element_set_state (pipeline, GST_STATE_NULL);
g_print ("Deleting pipeline\n");
gst_object_unref (GST_OBJECT (pipeline));
return 0;
}
I have also set debugging on: export GST_DEBUG=3
When I run my program I get the following error:
Running...
0:00:00.178663884 4797 0x8937020 WARN basesrc gstbasesrc.c:2582:gst_base_src_loop:<source2> error: Internal data flow error.
0:00:00.178766444 4797 0x8937020 WARN basesrc gstbasesrc.c:2582:gst_base_src_loop:<source2> error: streaming task paused, reason not-linked (-1)
Error: Internal data flow error.
Returned, stopping playback
0:00:00.202571746 4797 0x893ae00 WARN basetransform gstbasetransform.c:1627:gst_base_transform_prepare_output_buffer:<clrspace> pad-alloc failed: wrong-state
0:00:00.202645907 4797 0x893ae00 WARN basetransform gstbasetransform.c:2335:gst_base_transform_handle_buffer:<clrspace> could not get buffer from pool: wrong-state
Deleting pipeline
Why is it complaining about source2 not-linked?
A little late, but may be helpful:
If you look at the documentation for the videomixer element, you'll see that videomixer's sink pads are request pads. You need to create these pads before linking them.
/* Manually link the mixer, which has "Request" pads */
mixer_sink_pad_template = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (mixer), "sink_%u");
mixer_sink_pad = gst_element_request_pad (mixer, mixer_sink_pad_template, NULL, NULL);
sink_pad = gst_element_get_static_pad (clrspace, "src");
gst_pad_link ( sink_pad,mixer_sink_pad);
Similarly request pads are created for as many streams as you want.
gst_element_link_many (source1, filter, videobox1, mixer, clrspace, sink, NULL);
gst_element_link_many (source2, filter, videobox2, mixer, NULL);
Please also learn about the x,y,z properties on videomixerpad, you can spare the videobox elements this way and gain performance

Resources