I have a basic gstreamer command for playing audio stream received from network:
gst-launch-1.0 tcpserversrc host=127.0.0.1 port=5000 ! decodebin ! audioconvert ! alsasink
I tried to convert it to a C program, but when I'm running it I get "Internal data flow error".
#include <gst/gst.h>
/* Bus watch callback: quits the main loop on EOS or on an error
 * (printing the error message). Returning TRUE keeps the watch
 * installed. */
static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
  GMainLoop *loop = data;

  if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_EOS) {
    g_print ("End of stream\n");
    g_main_loop_quit (loop);
  } else if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ERROR) {
    GError *error = NULL;
    gchar *debug = NULL;

    gst_message_parse_error (msg, &error, &debug);
    g_free (debug);
    g_printerr ("Error: %s\n", error->message);
    g_error_free (error);
    g_main_loop_quit (loop);
  }

  return TRUE;
}
gint main (gint argc, gchar *argv[])
{
GMainLoop *loop;
GstElement *pipeline, *src, *dec, *conv, *sink;
GstBus *bus;
/* init GStreamer */
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
/* setup */
pipeline = gst_pipeline_new ("pipeline");
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
src = gst_element_factory_make ("tcpserversrc", "source");
g_object_set (G_OBJECT (src), "host", "127.0.0.1",NULL);
g_object_set (G_OBJECT (src), "port", 5000 ,NULL);
dec = gst_element_factory_make ("decodebin", "decoder");
conv = gst_element_factory_make ("audioconvert", "aconv");
sink = gst_element_factory_make ("alsasink", "sink");
gst_bin_add_many (GST_BIN (pipeline), src, dec, conv, sink, NULL);
gst_element_link (src, dec);
gst_element_link (dec, conv);
gst_element_link (conv, sink);
/* run */
gst_element_set_state (pipeline, GST_STATE_PLAYING);
g_main_loop_run (loop);
/* cleanup */
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (GST_OBJECT (pipeline));
return 0;
}
Here is the command which I'm using to test the server:
gst-launch-1.0 filesrc location="file.wav" ! tcpclientsink host=127.0.0.1 port=5000
Thanks.
Because decodebin's src pad is a dynamic (sometimes) pad. You have to connect decodebin to audioconvert when decodebin got its source pad.
You can see it with gst-inspect-1.0:
$ gst-inspect-1.0 decodebin
:
:
Pad Templates:
SRC template: 'src_%u'
Availability: Sometimes
Capabilities:
ANY
:
:
Add a callback function for pad-added to decodebin and link to audioconvert in the callback. A required change is basically this:
--- orig.c 2017-01-18 13:35:50.434605255 +0900
+++ new.c 2017-01-18 14:04:16.428847528 +0900
@@ -31,6 +31,21 @@
return TRUE;
}
+static void cb_new_pad (GstElement *element, GstPad *pad, gpointer data)
+{
+ gchar *name;
+ GstElement *other = data;
+
+ name = gst_pad_get_name (pad);
+ g_print ("A new pad %s was created for %s\n", name, gst_element_get_name(element));
+ g_free (name);
+
+ g_print ("element %s will be linked to %s\n",
+ gst_element_get_name(element),
+ gst_element_get_name(other));
+ gst_element_link(element, other);
+}
+
gint main (gint argc, gchar *argv[])
{
GMainLoop *loop;
@@ -59,9 +74,13 @@
gst_bin_add_many (GST_BIN (pipeline), src, dec, conv, sink, NULL);
gst_element_link (src, dec);
- gst_element_link (dec, conv);
gst_element_link (conv, sink);
+ /* you don't link them here */
+ /* gst_element_link (dec, conv); */
+ /* add call-back, instead */
+ g_signal_connect (dec, "pad-added", G_CALLBACK (cb_new_pad), conv);
+
/* run */
gst_element_set_state (pipeline, GST_STATE_PLAYING);
g_main_loop_run (loop);
Here is a link to a working code.
BTW, you don't have to do it by yourself but let gst_parse_launch() handle all of the above.
int main(int argc, char *argv[])
{
GstElement *pipeline;
GError *err = NULL;
GstBus *bus;
GMainLoop *loop;
gst_init(&argc, &argv);
loop = g_main_loop_new(NULL, FALSE);
pipeline = gst_parse_launch("tcpserversrc host=127.0.0.1 port=5000 ! decodebin ! audioconvert ! alsasink", &err);
gst_element_set_state(pipeline, GST_STATE_PLAYING);
bus = gst_element_get_bus(pipeline);
gst_bus_add_watch (bus, bus_call, loop);
g_main_loop_run(loop);
return 0;
}
There are some questions about dynamic pads on stackoverflow:
GStreamer: how to connect dynamic pads
g_signal_connect "pad-added" doesn't work
Related
gst-launch-1.0 --gst-debug-level=3 dvbsrc modulation='QAM 256'
frequency=147000000 ! decodebin name=demux demux. ! queue !
audioresample ! audioconvert ! voaacenc ! mux. mpegtsmux name=mux
! udpsink host=127.0.0.0 port=22 demux. ! queue ! videoconvert !
videoscale ! x264enc bitrate=1240 tune=zerolatency
!video/x-h264,stream-format=byte-stream,profile=high,width=540,height=380,key-int-max=15
! mux.
I want to convert into c with the same thing.
I am trying -:
#include <gst/gst.h>
#include <glib.h>
/* Bus watch: stops the main loop on end-of-stream or on an error.
 * Always returns TRUE so the watch stays active. */
static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
  GMainLoop *loop = (GMainLoop *) data;
  const GstMessageType type = GST_MESSAGE_TYPE (msg);

  switch (type) {
    case GST_MESSAGE_ERROR: {
      GError *error = NULL;
      gchar *debug = NULL;

      gst_message_parse_error (msg, &error, &debug);
      g_free (debug);
      g_printerr ("Error: %s\n", error->message);
      g_error_free (error);
      g_main_loop_quit (loop);
      break;
    }
    case GST_MESSAGE_EOS:
      g_print ("End of stream\n");
      g_main_loop_quit (loop);
      break;
    default:
      break;
  }
  return TRUE;
}
int main (gint argc, gchar *argv[])
{
GMainLoop *loop;
GstElement *pipeline, *source, *demuxer,*dec,*vdqueue,*adqueue, *conv, *sink;
GstBus *bus;
/* init GStreamer */
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
/* setup */
pipeline = gst_pipeline_new ("pipeline");
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
source = gst_element_factory_make ("filesrc", "filesource");
g_object_set (G_OBJECT (source), "location", argv[1], NULL);
g_printerr ("source is: %s \n",argv[1]);
//demuxer = gst_element_factory_make ("avidemux", "avi-demuxer");
demuxer = gst_element_factory_make ("oggdemux", "ogg-demuxer");
vdqueue = gst_element_factory_make ("queue", "video-queue");
adqueue = gst_element_factory_make ("queue", "audio-queue");
dec = gst_element_factory_make ("vorbisdec", "vorbis-decoder");
conv = gst_element_factory_make ("audioconvert", "converter");
sink = gst_element_factory_make ("autoaudiosink", "audio-output");
/* g_object_set (pipeline->mux,"max-delay", (guint64)0,NULL);
g_object_set (pipeline->mux,"max-page-delay",(guint64)0,NULL);
*/
g_object_set (G_OBJECT (source), "caps",
gst_caps_new_simple ("video/x-raw",
"format", G_TYPE_STRING, "RGB16",
"width", G_TYPE_INT, 640,
"height", G_TYPE_INT, 480,
"framerate", GST_TYPE_FRACTION, 0, 1,
NULL), NULL);
gst_bin_add_many (GST_BIN (pipeline),source,demuxer,vdqueue,adqueue,dec,conv,sink, NULL);
gst_element_link (source, demuxer);
gst_element_link (demuxer, vdqueue);
gst_element_link (vdqueue, adqueue);
gst_element_link (adqueue, dec);
gst_element_link (dec, conv);
gst_element_link (conv, sink);
/* run */
// g_printerr ("pipeline is: %s \n",pipeline);
gst_element_set_state (pipeline, GST_STATE_PLAYING);
g_main_loop_run (loop);
/* cleanup */
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (GST_OBJECT (pipeline));
return 0;
}
I am writing a media application to grab video frames from a video file. For this, I want to get the video properties before pulling the samples from the pipeline. So, I have added a callback for auto-plug signal at the decoder and trying to get the properties. These callbacks are not getting called even after I put the pipeline into playing state, but these are called if I try to pull a sample from the pipeline using gst_app_sink_pull_sample.
Am I missing anything here? My understanding is that these callbacks will get invoked when we put the pipeline into playing state.
#include <gst/gst.h>
#include <stdio.h>
static void bus_callback (GstBus *bus, GstMessage *msg, gpointer data)
{
switch (GST_MESSAGE_TYPE (msg))
{
case GST_MESSAGE_ERROR: {
GError *err;
gchar *debug;
gst_message_parse_error (msg, &err, &debug);
g_print ("Error: %s\n", err->message);
g_error_free (err);
g_free (debug);
break;
}
default:
/* Unhandled message */
break;
}
}
/* pad-added handler: links the newly created decodebin pad to the
 * sink element passed as user data.
 * BUG FIX: the gst_pad_link() result was ignored; a failure here is
 * exactly the kind of silent error that later surfaces as
 * "Internal data flow error". */
static void
on_pad_added (GstElement *element, GstPad *pad, gpointer data)
{
  GstPad *sinkpad;
  GstElement *decoder = (GstElement *) data;

  /* We can now link this pad with the decoder sink pad */
  sinkpad = gst_element_get_static_pad (decoder, "sink");
  if (GST_PAD_LINK_FAILED (gst_pad_link (pad, sinkpad)))
    g_printerr ("Failed to link pad %s\n", GST_PAD_NAME (pad));
  gst_object_unref (sinkpad);
}
static void
auto_plug_select (GstElement *decoder, GstPad *pad, GstCaps *caps,
GstElementFactory *factory, int *width )
{
const gchar *klass = gst_element_factory_get_klass (factory);
/* MW_customData *cdata = (MW_customData*) data;*/
GstCaps *scaps = gst_pad_query_caps (pad, NULL);
GstStructure *str = gst_caps_get_structure (scaps, 0);
const gchar *type = gst_structure_get_name (str);
printf (" Pad cap: %s\n", type);
if (g_strrstr(type,"video"))
{
gst_structure_get_int (str, "width", width);
printf(" Width: %d\n", *width);
}
}
/* Probes a media file with decodebin and prints the detected video
 * width without fully decoding (fakesink). */
int main (gint argc,
          gchar *argv[])
{
  GstElement *pipeline, *filesrc, *decoder, *fakesink;
  GstBus *bus;
  int width = 0;

  /* init GStreamer */
  gst_init (&argc, &argv);

  /* check args */
  if (argc != 2) {
    g_print ("Usage: %s <filename>\n", argv[0]);
    return -1;
  }

  /* create a new pipeline to hold the elements */
  pipeline = gst_pipeline_new ("pipeline");

  /* Bus call back */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, bus_callback, NULL);
  gst_object_unref (bus);

  /* create file source and decoder */
  filesrc = gst_element_factory_make ("filesrc", "source");
  g_object_set (G_OBJECT (filesrc), "location", argv[1], NULL);
  decoder = gst_element_factory_make ("decodebin", NULL);
  fakesink = gst_element_factory_make ("fakesink", "sink");

  /* BUG FIX: the two user-data pointers were swapped in the original —
   * pad-added needs the element to link to (fakesink), while
   * autoplug-select fills in the width. */
  g_signal_connect (decoder, "pad-added", G_CALLBACK (on_pad_added), fakesink);
  g_signal_connect (decoder, "autoplug-select", G_CALLBACK (auto_plug_select), &width);

  /* setup */
  gst_bin_add_many (GST_BIN (pipeline), filesrc, decoder, fakesink, NULL);
  gst_element_link (filesrc, decoder);

  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);

  /* BUG FIX: the original printed the width and tore the pipeline down
   * immediately, before decodebin had a chance to autoplug anything.
   * Block until preroll completes so the callbacks have fired. */
  gst_element_get_state (GST_ELEMENT (pipeline), NULL, NULL, GST_CLOCK_TIME_NONE);

  printf (" Width: %d\n", width);

  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}
You do not leave the pipeline any time for running. You probably stop it before data can trigger the decodebin's callbacks.
For being cheap try:
gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);
g_usleep(100000000);
printf(" Width: %d\n", width);
gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL);
But more correct would be to use a real GMainLoop and act on certain event to stop the pipeline again.
EDIT: P.S. Why not GstDiscoverer? https://gstreamer.freedesktop.org/documentation/pbutils/gstdiscoverer.html?gi-language=c
I'm new to C coding and I am writing a basic transcoding program for a project I am working on. I was wondering if anyone has a basic example which would allow me to capture the output statistics for example (actual bitrate for video and audio, framerate, resolution size, video h264 level, etc)
Please see below code:
#include <gst/gst.h>
#include <glib.h>
#include <stdio.h>
/* Periodic timeout callback: prints the pipeline's current playback
 * position. Returning TRUE keeps the GLib timeout source active. */
static gboolean
cb_print_position (GstElement *pipeline)
{
  gint64 position;

  if (gst_element_query_position (pipeline, GST_FORMAT_TIME, &position))
    g_print ("Time: %" GST_TIME_FORMAT "\r", GST_TIME_ARGS (position));

  return TRUE; /* call me again */
}
/* Bus watch: terminates the main loop on EOS or error, logging the
 * error text. TRUE keeps the watch registered. */
static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
  GMainLoop *loop = (GMainLoop *) data;
  GstMessageType msg_type = GST_MESSAGE_TYPE (msg);

  if (msg_type == GST_MESSAGE_EOS) {
    g_print ("End of stream\n");
    g_main_loop_quit (loop);
  } else if (msg_type == GST_MESSAGE_ERROR) {
    GError *error = NULL;
    gchar *debug_info = NULL;

    gst_message_parse_error (msg, &error, &debug_info);
    g_free (debug_info);
    g_printerr ("Error: %s\n", error->message);
    g_error_free (error);
    g_main_loop_quit (loop);
  }

  return TRUE;
}
/* Streams a test pattern to an RTMP server given as argv[1]:
 * videotestsrc ! x264enc ! flvmux ! rtmpsink */
int main (int argc, char *argv[])
{
  GMainLoop *loop;
  GstElement *pipeline, *videotestsrcm, *x264encm, *rtmpsinkm, *flvmuxm;
  GstBus *bus;
  guint bus_watch_id;
  const gchar *nano_str;
  guint major, minor, micro, nano;

  /* Initialisation — BUG FIX: gst_init was called twice in the
   * original; once is enough. */
  gst_init (&argc, &argv);

  /* BUG FIX: argv[1] was used without checking argc */
  if (argc != 2) {
    g_printerr ("Usage: %s <rtmp location>\n", argv[0]);
    return -1;
  }

  /* Report the linked GStreamer version.
   * BUG FIX: the original contained a garbled "µ" where the third
   * argument "&micro" belongs. */
  gst_version (&major, &minor, &micro, &nano);
  if (nano == 1)
    nano_str = "(CVS)";
  else if (nano == 2)
    nano_str = "(Prerelease)";
  else
    nano_str = "";
  printf ("This program is linked against GStreamer %d.%d.%d%s\n", major, minor, micro, nano_str);

  loop = g_main_loop_new (NULL, FALSE);

  /* Create gstreamer elements */
  pipeline = gst_pipeline_new ("videotest-pipeline");
  videotestsrcm = gst_element_factory_make ("videotestsrc", "testsource");
  x264encm = gst_element_factory_make ("x264enc", "videoencoder");
  rtmpsinkm = gst_element_factory_make ("rtmpsink", "video2sink");
  flvmuxm = gst_element_factory_make ("flvmux", "muxer");
  if (!pipeline || !videotestsrcm || !x264encm || !rtmpsinkm || !flvmuxm) {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }
  g_object_set (G_OBJECT (rtmpsinkm), "location", argv[1], NULL);

  /* we add a message handler */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  /* add and link: videotestsrc -> x264enc -> flvmux -> rtmpsink */
  gst_bin_add_many (GST_BIN (pipeline),
                    videotestsrcm, x264encm, rtmpsinkm, flvmuxm, NULL);
  gst_element_link_many (videotestsrcm, x264encm, flvmuxm, rtmpsinkm, NULL);

  /* Set the pipeline to "playing" state */
  g_print ("Now set pipeline in state playing...\n");
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* print the position every 200 ms while the loop runs */
  g_timeout_add (200, (GSourceFunc) cb_print_position, pipeline);

  /* Iterate */
  g_print ("Running...\n");
  g_main_loop_run (loop);

  /* Out of the main loop, clean up nicely */
  g_print ("Returned, stopping playback\n");
  gst_element_set_state (pipeline, GST_STATE_NULL);
  g_print ("Deleting pipeline\n");
  gst_object_unref (GST_OBJECT (pipeline));
  g_source_remove (bus_watch_id);
  g_main_loop_unref (loop);
  return 0;
}
I want to display two pictures in the same window. This code displays them in two different windows.
Is there a solution for joining the two pictures in the same window?
#include <gst/gst.h>
#include <glib.h>
/* Pipeline bus handler: on EOS or error, report and stop the main
 * loop. Keeps the watch alive by returning TRUE. */
static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
  GMainLoop *loop = (GMainLoop *) data;

  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_ERROR: {
      gchar *dbg = NULL;
      GError *err = NULL;

      gst_message_parse_error (msg, &err, &dbg);
      g_free (dbg);
      g_printerr ("Error: %s\n", err->message);
      g_error_free (err);
      g_main_loop_quit (loop);
      break;
    }
    case GST_MESSAGE_EOS:
      g_print ("End of stream\n");
      g_main_loop_quit (loop);
      break;
    default:
      break;
  }
  return TRUE;
}
/* pad-added handler: links a dynamically created pad to the sink pad
 * of the element passed as user data.
 * BUG FIX: the result of gst_pad_link() was ignored in the original;
 * failures now get reported instead of silently breaking data flow. */
static void on_pad_added (GstElement *element, GstPad *pad, gpointer data)
{
  GstPad *sinkpad;
  GstElement *decoder = (GstElement *) data;

  /* We can now link this pad with the vorbis-decoder sink pad */
  g_print ("Dynamic pad created, linking demuxer/decoder\n");
  sinkpad = gst_element_get_static_pad (decoder, "sink");
  if (GST_PAD_LINK_FAILED (gst_pad_link (pad, sinkpad)))
    g_printerr ("Failed to link pad %s\n", GST_PAD_NAME (pad));
  gst_object_unref (sinkpad);
}
/*
 * Attempts to composite two JPEG images into one window:
 * playbin2 -> imagefreeze -> videobox -> videomixer -> autovideosink.
 *
 * NOTE(review): several defects are visible in this block — flagged
 * inline rather than rewritten, because the overall design (linking
 * playbin2 like a normal element) cannot work as-is:
 *  - gst_init() runs AFTER g_main_loop_new(); GStreamer should be
 *    initialised before any other GStreamer call.
 *  - playbin2 is itself a complete pipeline with no static pads; it
 *    cannot be linked with gst_element_link_many() and does not emit
 *    "pad-added" the way a demuxer does.
 *  - source2, videobox2 and queuevideo are linked / used as signal
 *    targets but never added to the pipeline bin.
 *  - main() has no return statement (legal in C99, but inconsistent
 *    with the rest of the file).
 */
int main (int argc, char *argv[]) {
GMainLoop *loop;
GstElement *pipeline,*freeze,*clrspace, *source1, *source2, *videobox1,*videobox2, *mixer,*sink,*queuevideo;
GstBus *bus;
/* NOTE(review): should come after gst_init() */
loop = g_main_loop_new (NULL, FALSE);
gst_init (&argc, &argv);
/* Create gstreamer elements */
pipeline = gst_pipeline_new ("player");
/* NOTE(review): playbin2 is a top-level pipeline, not a linkable element */
source1 = gst_element_factory_make ("playbin2", "dec1");
source2 = gst_element_factory_make ("playbin2", "dec2");
freeze = gst_element_factory_make ("imagefreeze", "fr");
videobox1 = gst_element_factory_make ("videobox", "videobox1");
videobox2 = gst_element_factory_make ("videobox", "videobox2");
clrspace = gst_element_factory_make ("ffmpegcolorspace", "clrspace");
mixer = gst_element_factory_make ("videomixer", "mixer");
queuevideo = gst_element_factory_make ("queue", "queue-video");
sink = gst_element_factory_make ("autovideosink", "sink");
if (!pipeline || !source1 || !source2 || !sink || !mixer ||!freeze || !clrspace || !queuevideo ) {
g_printerr ("One element could not be created. Exiting.\n");
exit(1);
}
/* both sources point at the same remote JPEG */
g_object_set (source1, "uri", "http://www.logotheque.fr/6396-2/logo+RMC+INFO.jpg", NULL);
g_object_set (source2, "uri", "http://www.logotheque.fr/6396-2/logo+RMC+INFO.jpg", NULL);
/* videobox2 is shifted 200 px right so the images sit side by side */
g_object_set(videobox1,"border-alpha",0,"top",0,"left",0,NULL);
g_object_set(videobox2,"border-alpha",0,"top",0,"left",-200,NULL);
/* we add a message handler */
bus = gst_pipeline_get_bus(GST_PIPELINE (pipeline));
gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
/* we add all elements into the pipeline */
/* NOTE(review): source2, videobox2 and queuevideo are never added to
 * the bin, yet are linked / signal-connected below */
gst_bin_add_many (GST_BIN(pipeline), source1,mixer, clrspace, freeze,videobox1, sink, NULL);
/* we link the elements together */
/* NOTE(review): this link fails — playbin2 has no source pad */
gst_element_link_many (source2, mixer, clrspace, freeze,videobox2,sink, NULL);
//gst_element_link_many(source[1], mixer, NULL);
/* NOTE(review): playbin2 does not emit "pad-added" like a demuxer */
g_signal_connect (source1, "pad-added", G_CALLBACK (on_pad_added), queuevideo);
g_signal_connect (source2, "pad-added", G_CALLBACK (on_pad_added), queuevideo);
/* Set the pipeline to "playing" state*/
gst_element_set_state(pipeline, GST_STATE_PLAYING);
/* Iterate */
g_print ("Running...\n");
g_main_loop_run (loop);
/* Out of the main loop, clean up nicely */
g_print ("Returned, stopping playback\n");
gst_element_set_state (pipeline, GST_STATE_NULL);
g_print ("Deleting pipeline\n");
gst_object_unref (GST_OBJECT(pipeline));
}
Here is how to combine two images with gstreamer from the command line :
gst-launch-1.0 uridecodebin uri=file:///home/meh/Pictures/questions.jpg ! videoscale ! video/x-raw, width=320, height=240 ! imagefreeze ! videomixer name=m sink_1::xpos=320 ! autovideosink uridecodebin uri=file:///home/meh/Pictures/testsrc.png ! videoscale ! video/x-raw, width=320, height=240 ! imagefreeze ! m.
Explanation :
We create two decoders for the images, resize them with videoscale to an arbitrary size (here 320 x 240), freeze them and send them to videomixer. videomixer has the x position of sink_1 set to 320, which offsets the first image so that the second one appears as well.
My Scenario is as follows :-
I have set up a RTSP server at IP 192.168.1.24 at port 554.I use the following gst-launch command on client side to receive packets and everything works fine.
gst-launch rtspsrc location = rtsp://admin:admin123@192.168.1.24:554/axis-media/media.amp ! fakesink
But when I implement the same thing via C code it gives me error.My C code is as follows:-
#include <gst/gst.h>
#include <glib.h>
static gboolean bus-call (GstBus *bus, GstMessage *msg, gpointer data)
{
GMainLoop *loop = (GMainLoop *) data;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_EOS:
g_print ("End of stream\n");
g_main_loop_quit (loop);
break;
case GST_MESSAGE_ERROR: {
gchar *debug;
GError *error;
gst_message_parse_error (msg, &error, &debug);
g_free (debug);
g_printerr ("Error: %s\n", error->message);
g_error_free (error);
g_main_loop_quit (loop);
break;
}
default:
break;
}
return true;
}
int main (int argc, char *argv[])
{
GMainLoop *loop;
GstElement *pipeline, *source, *sink;
GstBus *bus;
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
if (argc != 2) {
return -1;
}
pipeline = gst_pipeline_new ("network-player");
source = gst_element_factory_make ("rtspsrc","file-source");
sink = gst_element_factory_make ("fakesink","fake");
if (!pipeline || !source || !sink) {
g_printerr ("One element could not be created. Exiting.\n");
return -1;
}
g_object_set (G_OBJECT (source), "location", argv[1], NULL);
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
gst_bin_add_many (GST_BIN (pipeline),source, sink, NULL);
gst_element_link_many (source, sink, NULL);
/* Set the pipeline to "playing" state*/
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* Iterate */
g_print ("Running...\n");
g_main_loop_run (loop);
/* Out of the main loop, clean up nicely */
g_print ("Returned, stopping playback\n");
gst_element_set_state (pipeline, GST_STATE_NULL);
g_print ("Deleting pipeline\n");
gst_object_unref (GST_OBJECT (pipeline));
return 0;
}
I am able to compile the code without any error.
But when I run the binary generated with the following format:-
user@user:~ ./helloworld rtsp://admin:admin123@192.168.1.24:554/axis-media/media.amp
I get the following error:-
Now playing: rtsp://root:nlss123@192.168.1.24:554/axis-media/media.amp
Running...
**Error: Internal data flow error**.
Returned, stopping playback
Deleting pipeline
Can anyone suggest why there is an Internal Data flow error?
I also had the same problem.
You should link the source to the sink with the "pad-added" signal.
In brief:
/* Holds the pipeline and all elements so the pad-added handler can
 * reach the depayloader. */
typedef struct myDataTag {
  GstElement *pipeline;
  GstElement *rtspsrc;
  GstElement *depayloader;
  GstElement *decoder;
  GstElement *sink;   /* BUG FIX: the member type was missing */
} myData_t;
myData_t appData;
appData->pipeline = gst_pipeline_new ("videoclient");
appData->rtspsrc = gst_element_factory_make ("rtspsrc", "rtspsrc");
g_object_set (G_OBJECT (appData->rtspsrc), "location", "rtsp://192.168.1.10:554/myStreamPath", NULL);
appData->depayloader = gst_element_factory_make ("rtph264depay","depayloader");
appData->decoder = gst_element_factory_make ("h264dec", "decoder");
appData->sink = gst_element_factory_make ("autovideosink", "sink");
//then add all elements together
gst_bin_add_many (GST_BIN (appData->pipeline), appData->rtspsrc, appData->depayloader, appData->decoder, appData->sink, NULL);
//link everythink after source
gst_element_link_many (appData->depayloader, appData->decoder, appData->sink, NULL);
/*
* Connect to the pad-added signal for the rtpbin. This allows us to link
* the dynamic RTP source pad to the depayloader when it is created.
*/
g_signal_connect (appData->rtspsrc, "pad-added", G_CALLBACK (pad_added_handler), &appData);
/* Set the pipeline to "playing" state*/
gst_element_set_state (appData->pipeline, GST_STATE_PLAYING);
/* pad added handler */
/* pad-added handler for rtspsrc: links the dynamically created
 * "recv_rtp_src_*" pad to the depayloader's sink pad, ignoring other
 * pads and pads that are already linked. Caps and the sink pad
 * reference are released on every path. */
static void pad_added_handler (GstElement *src, GstPad *new_pad, myData_t *pThis) {
  GstCaps *new_pad_caps = NULL;
  GstPad *sink_pad = gst_element_get_static_pad (pThis->depayloader, "sink");

  g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad), GST_ELEMENT_NAME (src));

  if (!g_str_has_prefix (GST_PAD_NAME (new_pad), "recv_rtp_src_")) {
    /* only the RTP receive pad is interesting */
    g_print (" It is not the right pad. Need recv_rtp_src_. Ignoring.\n");
  } else if (gst_pad_is_linked (sink_pad)) {
    /* nothing to do if the depayloader is already connected */
    g_print (" Sink pad from %s already linked. Ignoring.\n", GST_ELEMENT_NAME (src));
  } else {
    const gchar *new_pad_type;

    /* inspect the pad's media type for the log messages */
    new_pad_caps = gst_pad_get_caps (new_pad);
    new_pad_type = gst_structure_get_name (gst_caps_get_structure (new_pad_caps, 0));

    /* Attempt the link */
    if (GST_PAD_LINK_FAILED (gst_pad_link (new_pad, sink_pad))) {
      g_print (" Type is '%s' but link failed.\n", new_pad_type);
    } else {
      g_print (" Link succeeded (type '%s').\n", new_pad_type);
    }
  }

  /* Unreference the new pad's caps, if we got them */
  if (new_pad_caps != NULL)
    gst_caps_unref (new_pad_caps);
  /* Unreference the sink pad */
  gst_object_unref (sink_pad);
}
Hope that this will help someone..:)
you can get verbose error logs by running the apps by --gst-debug=*rtsp*:5 e.g.
./yourApplication --gst-debug=*rtsp*:5