Gstreamer-0.10 code failed to stream webcam video over udp - c

OS : Ubuntu
Gstreamer version 0.10
I have to develop an app to stream video from webcam to remote pc via udp.
I have written a small piece of code but it runs for a second and then throws error.
Error I am getting is
*Running...
Error: Internal data flow error.
Returned, stopping playback
Deleting pipeline*
Can someone please pinpoint my mistake?
Below is my code
// NOTE(review): fragment of main() — the signature and bus_call() are not shown here.
// The *sink* pointer below is declared but never created/used.
GstElement *pipeline, *source, *sink, *muxer, *videoenc, *payloader, *udpsink;
GstBus *bus;
GMainLoop *loop;
// Initialize GStreamer
gst_init (&argc, &argv);
loop = g_main_loop_new( NULL, FALSE );
// Create the elements
source = gst_element_factory_make ("v4l2src", "source");
// NOTE(review): muxer (qtdemux) is created but never added to the pipeline or
// linked — presumably leftover from an earlier experiment; verify and remove.
muxer = gst_element_factory_make ("qtdemux", "mux");
// videoenc = gst_element_factory_make("ffdec_mpeg4", "videoenc"); //why this failed
// NOTE(review): ffmpegcolorspace outputs raw video, but rtpmp4vpay below expects
// an MPEG-4 elementary stream — this caps mismatch is presumably what produces
// "Internal data flow error"; an MPEG-4 *encoder* (not decoder/colorspace) is
// needed between them. TODO confirm with GST_DEBUG.
videoenc = gst_element_factory_make("ffmpegcolorspace", "videoenc");// but this passed but in both cases app failed to run
payloader = gst_element_factory_make("rtpmp4vpay", "rtpmp4vpay");
udpsink = gst_element_factory_make("udpsink", "udpsink");
// Create the empty pipeline
pipeline = gst_pipeline_new ("test-pipeline");
if (!pipeline || !source )
{
g_printerr ("One element could not be created. Exiting.\n");
return -1;
}
if( !muxer )
{
g_printerr ("failed to create muxer Exiting.\n");
return -1;
}
if( !videoenc)
{
g_printerr ("failedto create videoenc. Exiting.\n");
return -1;
}
if( !payloader || !udpsink)
{
// NOTE(review): doubled braces here are harmless but look accidental.
{
g_printerr ("One element could not be created out of payloader or udpsink. Exiting.\n");
return -1;
}
}
g_object_set(G_OBJECT(payloader),
"config-interval", 0,
NULL);
// Stream to localhost:5000 over UDP.
g_object_set(G_OBJECT(udpsink),
"host", "127.0.0.1",
"port", 5000,
NULL);
// we add a message handler
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
//set der source
g_object_set (G_OBJECT ( source ), "device", "/dev/video0", NULL);
gst_bin_add_many (GST_BIN (pipeline), source, videoenc, payloader, udpsink, NULL);
// NOTE(review): return value unchecked — if this link fails the pipeline still
// goes to PLAYING and only fails later with "Internal data flow error".
gst_element_link_many (source, videoenc, payloader, udpsink, NULL);
// g_print("Linked all the Elements together\n");
gst_element_set_state (pipeline, GST_STATE_PLAYING);
// Iterate
g_print ("Running...\n");
g_main_loop_run (loop);
// Out of the main loop, clean up nicely
g_print ("Returned, stopping playback\n");
gst_element_set_state (pipeline, GST_STATE_NULL);
g_print ("Deleting pipeline\n");
gst_object_unref (GST_OBJECT (pipeline));
}

Too long for comment, I will post answer and we will see what we get.
Do you definitely have to use 0.10? Version 1.6.1 is already available, and Ubuntu 15.10 ships with 1.6 built in. Anyway..
I guess you are missing capsfilter:
GstElement *capsfilter = gst_element_factory_make("capsfilter", "camera_caps");
GstCaps *caps = gst_caps_from_string ("video/x-raw-yuv,format=(fourcc)YUY2,width=1280,height=720,framerate=25/1");
g_object_set (capsfilter, "caps", caps, NULL);
You should place it right after v4l2src element..
If this is not working, you can debug running your app with GST_DEBUG=default:4 which should print if there was linking problem.
You can also generate dot graph of pipeline and check if everything is linked properly..
You can speed up the debugging by rewriting the code in gst-launch shell command:
GST_DEBUG=3 gst-launch-0.10 v4l2src device=/dev/video0 ! video/x-raw-yuv,format=\(fourcc\)YUY2,width=1280,height=720,framerate=25/1 ! ffmpegcolorspace ! rtpmp4vpay ! queue ! udpsink port=5000 host=127.0.0.1

Related

Gstreamer x264enc invalid buffer size c

I've been trying to use this gstreamer c code(my system is running on ubuntu 20.04 with GStreamer 1.16.2 and gcc 9.4.0):
#include <gst/gst.h>
#include <glib.h>
#include <stdio.h>
// gst-launch-1.0 videotestsrc pattern=ball ! 'video/x-raw, format=(string)I420, width=(int)1920, height=(int)1080, framerate=(fraction)30/1' ! \
// ! queue ! nvvideoconvert ! nvv4l2h264enc bitrate=1000000 ! rtph264pay ! udpsink host=192.168.0.1 port=5000
int
main (int argc, char *argv[])
{
GstElement *pipeline, *source, *filter, *queue, *converter, *encoder, *payer, *sink;
GstBus *bus;
GstMessage *msg;
GstCaps *filtercaps;
GstStateChangeReturn ret;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements */
source = gst_element_factory_make ("filesrc", "source");
filter = gst_element_factory_make ("capsfilter","filter");
queue = gst_element_factory_make ("queue","queue");
converter = gst_element_factory_make ("videoconvert","converter");
encoder = gst_element_factory_make ("x264enc","encoder");
payer = gst_element_factory_make ("rtph264pay","payer");
sink = gst_element_factory_make ("udpsink", "sink");
/* Create the empty pipeline */
pipeline = gst_pipeline_new ("maxim-pipeline");
if (!pipeline || !source || !filter || !queue || !converter || !encoder || !payer || !sink) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Build the pipeline */
gst_bin_add_many (GST_BIN (pipeline), source, filter, queue, converter, encoder, payer, sink, NULL);
if (!gst_element_link_many (source,filter,queue,NULL)){
g_printerr ("Source->filter->queue problem\n");
gst_object_unref (pipeline);
return -1;
}
if (!gst_element_link_many (queue,converter,encoder,NULL)){
g_printerr ("Queue->converter->encoder problem\n");
gst_object_unref (pipeline);
return -1;
}
if (!gst_element_link_many(encoder,payer,sink,NULL)){
g_printerr ("Encoder->payer->sink problem\n");
gst_object_unref (pipeline);
return -1;
}
/* Modify the properties */
g_object_set (source, "location", "/home/thmsd/Videos/test.mkv", NULL);
g_object_set (encoder, "bitrate", 2000000, NULL);
g_object_set (sink, "host","192.168.0.1", NULL);
g_object_set (sink, "port",5000, NULL);
g_object_set (sink, "sync", "FALSE", NULL);
filtercaps = gst_caps_new_simple ("video/x-raw",
"format",G_TYPE_STRING,"I420",
"width", G_TYPE_INT, 1920,
"height", G_TYPE_INT, 1080,
"framerate",GST_TYPE_FRACTION,30,1,
NULL);
g_object_set (filter, "caps", filtercaps, NULL);
gst_caps_unref (filtercaps);
/* Start playing */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (pipeline);
return -1;
}
/* Wait until error or EOS */
bus = gst_element_get_bus (pipeline);
msg =
gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n",
GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n",
debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
break;
case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n");
break;
default:
/* We should not reach here because we only asked for ERRORs and EOS */
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (msg);
}
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
return 0;
}
But the following error happens, which actually refers to x264enc. Note that this code has been edited: it was originally Jetson-compatible and used nvv4l2h264enc:
The test video that I'm trying to stream has the following properties:
First note that gstreamer booleans are not typed with capitals, so use true and false or just use 0 and 1.
x264enc may not be that fast on Jetson. You may try the following :
#include <gst/gst.h>
/*
 * Transcode an MKV file to RTP/H.264 over UDP using the NVIDIA hardware
 * encoder, driving the whole pipeline from a gst_parse_launch() string.
 * Runs until the main loop is quit externally; exits on pipeline failure.
 */
int main (gint argc, gchar * argv[])
{
  gst_init (&argc, &argv);

  GMainLoop *loop = g_main_loop_new (NULL, FALSE);
  GError *error = NULL;

  /* Hardware-encoded path. A (much slower) software alternative would swap the
   * encode stage for: nvvidconv ! video/x-raw ! x264enc bitrate=20000
   * tune=zerolatency insert-vui=1 */
  char* gst_pipeline_str = "filesrc location=/home/nvidia/Videos/bbb_sunflower_1080p_60fps_normal.mkv ! matroskademux ! parsebin ! nvv4l2decoder ! nvv4l2h264enc bitrate=20000000 insert-sps-pps=1 insert-vui=1 idrinterval=15 ! h264parse ! rtph264pay ! udpsink host=127.0.0.1 port=5004";

  /* Build the pipeline from the launch string. */
  GstElement *pipeline = gst_parse_launch (gst_pipeline_str, &error);
  if (error || !pipeline) {
    g_error ("Failed to create pipeline\n");
    exit(-1);
  }

  /* Log property changes to the terminal; remove later to quiet the output. */
  g_signal_connect (pipeline, "deep-notify",
      G_CALLBACK (gst_object_default_deep_notify), NULL);

  /* Bring the pipeline up: READY, then PLAYING. */
  gst_element_set_state (pipeline, GST_STATE_READY);
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Block until the state change completes (or fails). */
  if (gst_element_get_state (pipeline, NULL, NULL, -1) == GST_STATE_CHANGE_FAILURE) {
    g_error ("Failed to go into PLAYING state");
    exit(-2);
  }

  g_print ("Running ...\n");
  g_main_loop_run (loop);
  return 0;
}
This is what I've successfully tested on Jetson (AGX Xavier running L4T R32.6.1).
You would adapt to your file source and receiver address/port, and save as test_transcode_MKV_to_RTPH264.c then build with:
gcc -Wall -o test_transcode_MKV_to_RTPH264 test_transcode_MKV_to_RTPH264.c `pkg-config --cflags --libs gstreamer-1.0 gobject-2.0 glib-2.0`
and test streaming:
./test_transcode_MKV_to_RTPH264
Then if receiver has gstreamer installed, you should be able to display with something like:
gst-launch-1.0 udpsrc port=5004 ! application/x-rtp,encoding-name=H264 ! rtpjitterbuffer latency=300 ! rtph264depay ! decodebin ! autovideosink
For receiving RTPH264 with FFMPEG or VLC, you may have to create a SDP file.
EDIT: for non-NVIDIA case, you may try:
filesrc location=test_h265.mkv ! matroskademux ! h265parse ! avdec_h265 ! videoconvert ! x264enc bitrate=20000 tune=zerolatency insert-vui=1 key-int-max=30 ! h264parse ! rtph264pay ! udpsink host=127.0.0.1 port=5004

Gstreamer RTSP `gst-launch-1.0` Equivalent C Code

I am currently working on a project using NVIDIA Deepstream that involves GStreamer. When I tried to change the source element from "filesrc" element to "rtspsrc" element and adding on "rtph264depay" and "queue", it resulted in
0:00:09.533730268 19680 0x7fc02c0025e0 WARN basesrc gstbasesrc.c:2948:gst_base_src_loop:<udpsrc0> error: Internal data flow error.
0:00:09.533772178 19680 0x7fc02c0025e0 WARN basesrc gstbasesrc.c:2948:gst_base_src_loop:<udpsrc0> error: streaming task paused, reason not-linked (-1)
ERROR from element udpsrc0: Internal data flow error.
Error: Internal data flow
I thought this might have been caused due to the codes preceding (TCP server connection) or following (NVIDIA hardware utilizing elements) the "source" element. To test that I was on the right direction, I tried running
gst-launch-1.0 rtspsrc location=rtsp://192.168.0.71:8554/h264ESVideoTest ! rtph264depay ! queue ! h264parse ! avdec_h264 ! videoconvert ! videoscale ! autovideosink
which managed to show the stream and at the same time its equivalent code in C as attached below
#include <gst/gst.h>
#include <glib.h>
/*
 * Bus watch callback: quits the supplied GMainLoop on EOS or ERROR,
 * printing error details first. Always returns TRUE to stay installed.
 */
static gboolean
bus_call (GstBus * bus, GstMessage * msg, gpointer data)
{
  GMainLoop *main_loop = (GMainLoop *) data;
  GstMessageType msg_type = GST_MESSAGE_TYPE (msg);

  if (msg_type == GST_MESSAGE_EOS) {
    g_print ("End of stream\n");
    g_main_loop_quit (main_loop);
  } else if (msg_type == GST_MESSAGE_ERROR) {
    GError *error = NULL;
    gchar *debug = NULL;

    gst_message_parse_error (msg, &error, &debug);
    g_printerr ("ERROR from element %s: %s\n",
        GST_OBJECT_NAME (msg->src), error->message);
    g_free (debug);              /* debug detail is discarded, only freed */
    g_printerr ("Error: %s\n", error->message);
    g_error_free (error);
    g_main_loop_quit (main_loop);
  }
  return TRUE;
}
int
main (int argc, char *argv[])
{
GMainLoop *loop = NULL;
GstElement *pipeline = NULL,
*source = NULL,
*rtpdepay = NULL,
*vidqueue = NULL,
*h264parser = NULL,
*decoder = NULL,
*vidconvert = NULL,
*vidscale = NULL,
*sink = NULL;
GstBus *bus = NULL;
guint bus_watch_id;
GstCaps *caps1 = NULL, *caps2 = NULL;
gulong osd_probe_id = 0;
GstPad *osd_sink_pad = NULL;
/* GStreamer initialization */
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
/* Create gstreamer elements */
pipeline = gst_pipeline_new ("pipeline");
source = gst_element_factory_make ("rtspsrc", "file-source");
rtpdepay = gst_element_factory_make ("rtph264depay", "rtpdepay");
vidqueue = gst_element_factory_make ("queue", "vidqueue");
h264parser = gst_element_factory_make ("h264parse", "h264parser");
decoder = gst_element_factory_make ("avdec_h264", "avh264decoder");
vidconvert = gst_element_factory_make ("videoconvert", "vidconvert");
vidscale = gst_element_factory_make ("videoscale", "vidscale");
sink = gst_element_factory_make ("autovideosink", "sink");
/* Check elements creation */
if (!pipeline ||
!source ||
!rtpdepay ||
!vidqueue ||
!h264parser ||
!decoder ||
!vidconvert ||
!vidscale ||
!sink) {
g_printerr ("One or more element could not be created. Exiting.\n");
return -1;
}
/* Set input location to the source element */
g_object_set (G_OBJECT (source), "location", argv[1], NULL);
/* Add a message handler */
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
/* Set up the pipeline */
/* Add all elements into the pipeline */
gst_bin_add_many (GST_BIN (pipeline),
source,
rtpdepay,
vidqueue,
h264parser,
decoder,
vidconvert,
vidscale,
sink,
NULL);
/* Link the elements together */
gst_element_link_many (source,
rtpdepay,
vidqueue,
h264parser,
decoder,
vidconvert,
vidscale,
sink,
NULL);
/* Set the pipeline to "playing" state */
g_print ("Now playing: %s\n", argv[1]);
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* Wait till pipeline encounters an error or EOS */
g_print ("Running...\n");
g_main_loop_run (loop);
/* Out of the main loop */
g_print ("Returned, stopping playback\n");
gst_element_set_state (pipeline, GST_STATE_NULL);
g_print ("Deleting pipeline\n");
gst_object_unref (GST_OBJECT (pipeline));
g_source_remove (bus_watch_id);
g_main_loop_unref (loop);
return 0;
}
which resulted in the same error as before.
I was under the impression that any properties that doesn't require setting inside the gst-launch-1.0 command would not require its equivalent in C code as well. Are there any properties of "rtspsrc" that requires setting in C but gst-launch-1.0 does automatically? Or am I making another kind of mistake altogether?
EDIT1:
attached is the explicit error log for the C code
0:00:00.095045906 19967 0x7f60c401d8f0 FIXME default gstutils.c:3766:gst_pad_create_stream_id_internal:<fakesrc0:src> Creating random stream-id, consider implementing a deterministic way of creating a stream-id
0:00:00.135622983 19967 0x7f60b80031e0 WARN basesrc gstbasesrc.c:2948:gst_base_src_loop:<udpsrc1> error: Internal data flow error.
0:00:00.135662497 19967 0x7f60b80031e0 WARN basesrc gstbasesrc.c:2948:gst_base_src_loop:<udpsrc1> error: streaming task paused, reason not-linked (-1)
ERROR from element udpsrc1: Internal data flow error.
Error: Internal data flow error.
Returned, stopping playback
0:00:00.136197250 19967 0x1c9ba30 WARN rtspsrc gstrtspsrc.c:5483:gst_rtspsrc_try_send:<file-source> send interrupted
0:00:00.136228722 19967 0x1c9ba30 WARN rtspsrc gstrtspsrc.c:7552:gst_rtspsrc_pause:<file-source> PAUSE interrupted
You should link source to to sink with "pad-added" signal. Check this: RTSP pipeline implemented via C code not working?.

Gstreamer1.0 : link a decodebin to videoconvert

I have the following pipeline which works fine:
gst-launch-1.0 -v filesrc location=/home/Videos/sample_h264.mov ! decodebin ! videoconvert ! autovideosink
I want to write a C program to do the same thing. So I translated the previous pipeline to the following code:
pipeline = gst_pipeline_new ("video_pipeline");
if (!pipeline) {
g_print("Failed to create the pipeline\n");
return -1;
}
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
watch_id = gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
source = gst_element_factory_make ("filesrc", "file-source");
decoder = gst_element_factory_make ("decodebin", "standard-decoder");
converter = gst_element_factory_make ("videoconvert", "converter");
sink = gst_element_factory_make ("autovideosink", "video-sink");
if (!source || !decoder || !converter || !sink) {
g_print("Failed to create one or more pipeline elements\n");
return -1;
}
g_object_set(G_OBJECT(source), "location", file_name, NULL);
gst_bin_add_many (GST_BIN (pipeline), source, decoder, converter, sink, NULL);
if (!gst_element_link_many (source, decoder, converter, sink, NULL)) {
g_print ("Failed to link some elements!\n");
return -1;
}
/* run */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
GstMessage *msg;
g_print ("Failed to start up pipeline!\n");
/* check if there is an error message with details on the bus */
msg = gst_bus_poll (bus, GST_MESSAGE_ERROR, 0);
if (msg) {
GError *err = NULL;
gst_message_parse_error (msg, &err, NULL);
g_print ("ERROR: %s\n", err->message);
g_error_free (err);
gst_message_unref (msg);
}
return -1;
}
But I get error when I try to connect the decoder to the converter. Why it works fine with the command line but not with C code?
Decodebin uses something called a "sometimes-pad", which is basically a pad that will show up when a certain condition is met, in decodebins case that is media being decoded. gst-launch will do this sort of thing automagically, but in code you need to register a callback, and then link the pad in that callback. See also: GStreamer: how to connect dynamic pads
As #HarvardGraff said, decodebin has no static src pads (see gst-inspect decodebin).
But you can use launch-strings in your app as well. That way GStreamer should handle all the linking:
GstError *error = NULL;
GstElement *pipeline = gst_parse_launch("filesrc name=src ! decodebin ! videoconvert ! autovideosink", &error);
if (!error) {
GstElement filesrc = gst_bin_get_by_name(GST_BIN(pipeline), "src");
g_object_set(filesrc, "location", "/home/Videos/sample_h264.mov", NULL);
}

GStreamer caps filtering issue when converting from command line to C code

I am having issues converting my working GStreamer pipeline from a command line version to C code. From the command line the following command will successfully play my headerless mu-law audio file:
gst-launch filesrc location=test.ulaw ! audio/x-mulaw, rate=8000, channels=1 ! mulawdec ! audioconvert ! audioresample ! autoaudiosink
However, my issues are arising when trying to add in the "audio/x-mulaw, rate=8000, channels=1" bit into my C program. The program started off playing wav files (using wavparse in place of mulawdec) so that I know my base C code works, and it must just be that I am misinterpreting how the caps bit needs to be added in to make it work with mu-law files.
I am creating the caps, then using the gst_element_link_filtered to use this:
GstCaps *gstMuLawCaps = gst_caps_from_string("audio/x-mulaw, rate=8000, channels=1");
gst_element_link_filtered(gstFileSource, gstMuLawDecoder, gstMuLawCaps);
But this is not working, and running the program produces the following output:
>gst-mulaw.exe test.ulaw
Playing.
Error: Internal data flow error.
Playback Finished.
I would be grateful if anyone is able to help shed some light on what I am doing wrong. The full code is given below:
#include <gst/gst.h>
#include <glib.h>
/*
 * Bus watch: stops the main loop on end-of-stream or on error (printing the
 * error message first). Returning TRUE keeps the watch installed.
 */
static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data) {
    GMainLoop *mainLoop = (GMainLoop *) data;
    GstMessageType messageType = GST_MESSAGE_TYPE (msg);

    if (messageType == GST_MESSAGE_EOS) {
        g_print ("End of stream\n");
        g_main_loop_quit (mainLoop);
    } else if (messageType == GST_MESSAGE_ERROR) {
        GError *error = NULL;
        gchar *debug = NULL;

        gst_message_parse_error (msg, &error, &debug);
        g_free (debug);  /* debug string is not printed, just released */
        g_printerr ("Error: %s\n", error->message);
        g_error_free (error);
        g_main_loop_quit (mainLoop);
    }
    return TRUE;
}
/*
 * "pad-added" handler: links a newly created source pad to the static "sink"
 * pad of the element passed via user data.
 */
static void on_pad_added (GstElement *gstSourceElement, GstPad *gstSourcePad, gpointer data) {
    GstElement *targetElement = (GstElement *) data;
    GstPad *targetSinkPad;

    g_print("Linking dynamic pad.\n");
    targetSinkPad = gst_element_get_static_pad (targetElement, "sink");
    gst_pad_link (gstSourcePad, targetSinkPad);
    gst_object_unref (targetSinkPad);
}
int main (int argc, char *argv[]) {
GMainLoop *loop;
GstElement *gstPipeline, *gstFileSource, *gstMuLawDecoder, *gstAudioConvert, *gstAudioResample, *gstAudioSink;
GstBus *bus;
// GStreamer initialisation.
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
// Check input arguments.
if (argc != 2) {
g_printerr ("Usage: %s <mu-law File>\n", argv[0]);
return -1;
}
// Create the GStreamer elements.
gstPipeline = gst_pipeline_new ("player");
gstFileSource = gst_element_factory_make ("filesrc", "filesource");
gstMuLawDecoder = gst_element_factory_make ("mulawdec", "mulawdecoder");
gstAudioConvert = gst_element_factory_make ("audioconvert", "audioconverter");
gstAudioResample = gst_element_factory_make ("audioresample", "audioresampler");
gstAudioSink = gst_element_factory_make ("autoaudiosink", "audiosink");
if (!gstPipeline || !gstFileSource || !gstMuLawDecoder || !gstAudioConvert || !gstAudioResample || !gstAudioSink) {
g_printerr ("An element could not be created. Exiting.\n");
return -1;
}
// Linke the filesrc object to that passed on the command line.
g_object_set (G_OBJECT (gstFileSource), "location", argv[1], NULL);
// Setup the GStreamer bus.
bus = gst_pipeline_get_bus (GST_PIPELINE (gstPipeline));
gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
// Add the objects to the pipeline.
gst_bin_add_many (GST_BIN (gstPipeline), gstFileSource, gstMuLawDecoder, gstAudioConvert, gstAudioResample, gstAudioSink, NULL);
// Link the elements together.
GstCaps *gstMuLawCaps = gst_caps_from_string("audio/x-mulaw, rate=8000, channels=1");
gst_element_link_filtered(gstFileSource, gstMuLawDecoder, gstMuLawCaps);
gst_caps_unref(gstMuLawCaps);
gst_element_link_many (gstAudioConvert, gstAudioResample, gstAudioSink, NULL);
g_signal_connect (gstMuLawDecoder, "pad-added", G_CALLBACK (on_pad_added), gstAudioConvert);
// Set the pipeline to state playing, and run the main loop.
g_print ("Playing.\n");
gst_element_set_state (gstPipeline, GST_STATE_PLAYING);
g_main_loop_run (loop);
// Finished playback, cleanup.
g_print ("Playback Finished.\n");
gst_element_set_state (gstPipeline, GST_STATE_NULL);
gst_object_unref (GST_OBJECT (gstPipeline));
return 0;
}
Thanks.
Try changing
// Link the elements together.
GstCaps *gstMuLawCaps = gst_caps_from_string("audio/x-mulaw, rate=8000, channels=1");
gst_element_link_filtered(gstFileSource, gstMuLawDecoder, gstMuLawCaps);
gst_caps_unref(gstMuLawCaps);
gst_element_link_many (gstAudioConvert, gstAudioResample, gstAudioSink, NULL);
g_signal_connect (gstMuLawDecoder, "pad-added", G_CALLBACK (on_pad_added), gstAudioConvert);
to
// Link the elements together.
GstCaps *gstMuLawCaps = gst_caps_from_string("audio/x-mulaw, rate=8000, channels=1");
gst_element_link_filtered(gstFileSource, gstMuLawDecoder, gstMuLawCaps);
gst_caps_unref(gstMuLawCaps);
gst_element_link_many (gstMuLawDecoder, gstAudioConvert, gstAudioResample, gstAudioSink, NULL);
pad-added handling is needed for elements with sometimes pads (e.g. demuxers). You can remove the on_pad_added callback function (it was not called anyway, right?) The pad-type can be see in "gst-inspect mulawdec" output.

How to program videomixer using Gstreamer C API

I am trying to simulate the following gstreamer pipeline using the C API:
gst-launch -e videomixer name=mix ! ffmpegcolorspace ! xvimagesink \
videotestsrc pattern=1 ! video/x-raw-yuv,width=100,height=100 ! videobox border-alpha=0 top=0 left=0 ! mix. \
videotestsrc pattern=0 ! video/x-raw-yuv,width=100,height=100 ! videobox border-alpha=0 top=0 left=-100 ! mix.
So far I have:
#include <gst/gst.h>
#include <glib.h>
/*
 * Bus watch callback: quits the GMainLoop (passed via `data`) when the
 * pipeline posts EOS or an error; error messages are printed to stderr.
 * Always returns TRUE so the watch stays installed.
 */
static gboolean
bus_call (GstBus *bus,
GstMessage *msg,
gpointer data)
{
GMainLoop *loop = (GMainLoop *) data;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_EOS:
g_print ("End of stream\n");
g_main_loop_quit (loop);
break;
case GST_MESSAGE_ERROR: {
gchar *debug;
GError *error;
gst_message_parse_error (msg, &error, &debug);
/* The debug string is released without being printed. */
g_free (debug);
g_printerr ("Error: %s\n", error->message);
g_error_free (error);
g_main_loop_quit (loop);
break;
}
default:
break;
}
return TRUE;
}
/*
 * Compose two 100x200 videotestsrc branches side by side with videomixer:
 *   videotestsrc ! capsfilter ! videobox ! videomixer ! ffmpegcolorspace
 *   ! xvimagesink           (second branch joins at the mixer)
 *
 * FIXES vs. the original:
 *  - Each branch gets its own capsfilter. A single element's src pad can be
 *    linked only once, so routing both sources through one `filter` made the
 *    second gst_element_link_many() fail ("reason not-linked" on source2).
 *  - gst_element_link_many() is NULL-terminated (the sentinel was missing —
 *    undefined behavior for a varargs function) and its result is checked.
 *  - The unused `scale` element is no longer created (it was never added to
 *    the pipeline, leaking a floating element), and filtercaps is unreffed.
 * videomixer's sink pads are request pads; gst_element_link_many() requests
 * them automatically when linking each branch into the mixer.
 */
int
main (int argc,
char *argv[])
{
  GMainLoop *loop;
  GstElement *pipeline;
  GstElement *source1, *source2;
  GstElement *filter1, *filter2;
  GstElement *videobox1, *videobox2;
  GstElement *mixer, *clrspace, *sink;
  GstCaps *filtercaps;
  GstBus *bus;

  /* Initialisation */
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  /* Create gstreamer elements */
  pipeline = gst_pipeline_new ("player");
  source1 = gst_element_factory_make ("videotestsrc", "source1");
  source2 = gst_element_factory_make ("videotestsrc", "source2");
  filter1 = gst_element_factory_make ("capsfilter", "filter");
  filter2 = gst_element_factory_make ("capsfilter", "filter2");
  videobox1 = gst_element_factory_make ("videobox", "videobox1");
  videobox2 = gst_element_factory_make ("videobox", "videobox2");
  mixer = gst_element_factory_make ("videomixer", "mixer");
  clrspace = gst_element_factory_make ("ffmpegcolorspace", "clrspace");
  sink = gst_element_factory_make ("xvimagesink", "sink");

  if (!pipeline || !source1 || !source2 || !filter1 || !filter2 ||
      !videobox1 || !videobox2 || !mixer || !clrspace || !sink) {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }

  /* Same caps on both branches; the caps object can be shared and released
   * once both capsfilters hold their own reference. */
  filtercaps = gst_caps_new_simple ("video/x-raw-yuv",
      "width", G_TYPE_INT, 200,
      "height", G_TYPE_INT, 100,
      NULL);
  g_object_set (G_OBJECT (filter1), "caps", filtercaps, NULL);
  g_object_set (G_OBJECT (filter2), "caps", filtercaps, NULL);
  gst_caps_unref (filtercaps);

  /* Second box is shifted right by its own width to sit beside the first. */
  g_object_set(videobox1,"border-alpha",0,"top",0,"left",0,NULL);
  g_object_set(videobox2,"border-alpha",0,"top",0,"left",-200,NULL);

  /* Test patterns: 0 = SMPTE bars, 1 = snow. */
  g_object_set (G_OBJECT (source1), "pattern", 0, NULL);
  g_object_set (G_OBJECT (source2), "pattern", 1, NULL);

  /* we add a message handler */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  /* we add all elements into the pipeline */
  gst_bin_add_many (GST_BIN (pipeline),
      source1, filter1, videobox1,
      source2, filter2, videobox2,
      mixer, clrspace, sink, NULL);

  /* Branch 1 runs all the way to the sink; branch 2 ends at the mixer. */
  if (!gst_element_link_many (source1, filter1, videobox1, mixer, clrspace, sink, NULL)) {
    g_printerr ("Failed to link the first branch. Exiting.\n");
    return -1;
  }
  if (!gst_element_link_many (source2, filter2, videobox2, mixer, NULL)) {
    g_printerr ("Failed to link the second branch. Exiting.\n");
    return -1;
  }

  /* Set the pipeline to "playing" state*/
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Iterate */
  g_print ("Running...\n");
  g_main_loop_run (loop);

  /* Out of the main loop, clean up nicely */
  g_print ("Returned, stopping playback\n");
  gst_element_set_state (pipeline, GST_STATE_NULL);
  g_print ("Deleting pipeline\n");
  gst_object_unref (GST_OBJECT (pipeline));
  return 0;
}
I have also set debugging on: export GST_DEBUG=3
When I run my program I get the following error:
Running...
0:00:00.178663884 4797 0x8937020 WARN basesrc gstbasesrc.c:2582:gst_base_src_loop:<source2> error: Internal data flow error.
0:00:00.178766444 4797 0x8937020 WARN basesrc gstbasesrc.c:2582:gst_base_src_loop:<source2> error: streaming task paused, reason not-linked (-1)
Error: Internal data flow error.
Returned, stopping playback
0:00:00.202571746 4797 0x893ae00 WARN basetransform gstbasetransform.c:1627:gst_base_transform_prepare_output_buffer:<clrspace> pad-alloc failed: wrong-state
0:00:00.202645907 4797 0x893ae00 WARN basetransform gstbasetransform.c:2335:gst_base_transform_handle_buffer:<clrspace> could not get buffer from pool: wrong-state
Deleting pipeline
Why is it complaining about source2 not-linked?
A little late, but may be helpful:
If you look at the documentation for the videomixer element, you'll see that videomixer's sink pads are request pads. You need to create these pads before linking them.
/* Manually link the mixer, which has "Request" pads */
mixer_sink_pad_template = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (mixer), "sink_%u");
mixer_sink_pad = gst_element_request_pad (mixer, mixer_sink_pad_template, NULL, NULL);
sink_pad = gst_element_get_static_pad (clrspace, "src");
gst_pad_link ( sink_pad,mixer_sink_pad);
Similarly request pads are created for as many streams as you want.
gst_element_link_many (source1, filter, videobox1, mixer, clrspace, sink, NULL);
gst_element_link_many (source2, filter, videobox2, mixer, NULL);
Please also learn about the x,y,z properties on videomixerpad, you can spare the videobox elements this way and gain performance

Resources