I have a pipeline with an appsink that pushes samples to an appsrc, which in turn acts as the source of a pipeline created by an RTSP server. It works: I can connect to the RTSP server and see the streamed video. The problem is latency. For some reason a lot of buffers are queued in the appsrc, and the viewed stream has a latency of more than two seconds.
I tried to find the source of the latency, and it looks like data only starts being read from the appsrc source pad some time after the pipeline is started. The delay between the point the pipeline is started and the point data starts to be read out of the appsrc source pad then turns into its latency.
I found this by reading out how many bytes are queued in the appsrc each time I push a buffer into it. This value keeps rising for some time; once the read-out of data starts, the amount of bytes stored in the appsrc queue stays approximately the same for the rest of the time I stream the video.
Here is the test application I'm using to test this design.
#include <stdio.h>
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <gst/app/gstappsink.h>
#include <time.h>
#include <gst/rtsp-server/rtsp-server.h>
GMainLoop *loop;
GstElement *appsink;
GstElement *appsrc;
GstElement *appsink_pipeline;
/* Functions below print the Capabilities in a human-friendly format */
static gboolean print_field (GQuark field, const GValue * value, gpointer pfx) {
gchar *str = gst_value_serialize (value);
g_print ("%s %15s: %s\n", (gchar *) pfx, g_quark_to_string (field), str);
g_free (str);
return TRUE;
}
static void print_caps (const GstCaps * caps, const gchar * pfx) {
guint i;
g_return_if_fail (caps != NULL);
if (gst_caps_is_any (caps)) {
g_print ("%sANY\n", pfx);
return;
}
if (gst_caps_is_empty (caps)) {
g_print ("%sEMPTY\n", pfx);
return;
}
for (i = 0; i < gst_caps_get_size (caps); i++) {
GstStructure *structure = gst_caps_get_structure (caps, i);
g_print ("%s%s\n", pfx, gst_structure_get_name (structure));
gst_structure_foreach (structure, print_field, (gpointer) pfx);
}
}
/* called when the appsink notifies us that there is a new buffer ready for
* processing */
static GstFlowReturn
on_new_sample_from_sink (GstElement * elt, void * data)
{
GstSample *sample;
GstFlowReturn ret = GST_FLOW_OK;
guint64 bytes;
/* get the sample from appsink */
sample = gst_app_sink_pull_sample (GST_APP_SINK (elt));
if(appsrc)
{
bytes = gst_app_src_get_current_level_bytes(GST_APP_SRC(appsrc));
g_print("buffered bytes before push %lu\n", bytes);
ret = gst_app_src_push_sample(GST_APP_SRC (appsrc), sample);
// bytes = gst_app_src_get_current_level_bytes(GST_APP_SRC(appsrc));
// if(ret == GST_FLOW_OK)
// g_print("pushed ok - buffered bytes after push %lu\n", bytes);
}
gst_sample_unref (sample);
return ret;
}
/* called when we get a GstMessage from the source pipeline when we get EOS, we
* notify the appsrc of it. */
static gboolean
on_source_message (GstBus * bus, GstMessage * message, void * data)
{
gint percent;
g_print ("%s\n", __func__);
switch (GST_MESSAGE_TYPE (message)) {
case GST_MESSAGE_EOS:
g_print ("The source got dry\n");
gst_app_src_end_of_stream (GST_APP_SRC (appsrc));
break;
case GST_MESSAGE_ERROR:
g_print ("Received error\n");
g_main_loop_quit (loop);
break;
case GST_MESSAGE_BUFFERING:
gst_message_parse_buffering (message, &percent);
g_print ("Buffering = %d\n", percent);
break;
default:
break;
}
return TRUE;
}
static void need_data (GstElement * appsrc_loc,
guint length,
gpointer udata)
{
g_print("Need data\n");
}
/* this timeout is periodically run to clean up the expired sessions from the
* pool. This needs to be run explicitly currently but might be done
* automatically as part of the mainloop. */
static gboolean
timeout (GstRTSPServer * server)
{
GstRTSPSessionPool *pool;
pool = gst_rtsp_server_get_session_pool (server);
gst_rtsp_session_pool_cleanup (pool);
g_object_unref (pool);
return TRUE;
}
void clientConnected(GstRTSPServer* server, GstRTSPClient* client, gpointer user)
{
g_print("%s\n", __func__);
}
static void media_state_cb(GstRTSPMedia *media, GstState state)
{
g_print("media state = %d\n", state);
}
static void
media_construct (GstRTSPMediaFactory * factory, GstRTSPMedia * media,
gpointer user_data)
{
GstElement *element;
g_print("%s\n", __func__);
/* get the element used for providing the streams of the media */
element = gst_rtsp_media_get_element (media);
/* get our appsrc, we named it 'appsrc' with the name property */
appsrc = gst_bin_get_by_name_recurse_up (GST_BIN (element), "appsrc");
g_signal_connect (appsrc, "need-data",
G_CALLBACK (need_data), NULL);
g_signal_connect (media, "new-state",
G_CALLBACK (media_state_cb), NULL);
gst_object_unref (element);
}
static void
media_configure (GstRTSPMediaFactory * factory, GstRTSPMedia * media,
gpointer user_data)
{
GstPad *pad;
GstCaps *caps;
gchar *caps_str;
GstElement *element;
g_print("%s\n", __func__);
/* get the element used for providing the streams of the media */
element = gst_rtsp_media_get_element (media);
/* get our appsrc, we named it 'mysrc' with the name property */
appsrc = gst_bin_get_by_name_recurse_up (GST_BIN (element), "appsrc");
pad = gst_element_get_static_pad (appsink, "sink");
if(pad)
{
g_print("Got pad\n");
caps = gst_pad_get_current_caps (pad);
if(caps)
{
caps_str = gst_caps_to_string (caps);
g_print("Got caps %s\n", caps_str);
g_object_set (G_OBJECT (appsrc), "caps", caps, NULL);
gst_caps_unref(caps);
}
}
/* this instructs appsrc that we will be dealing with timed buffer */
gst_util_set_object_arg (G_OBJECT (appsrc), "format", "time");
gst_object_unref (element);
}
int main (int argc, char *argv[]){
GstBus *bus;
GstRTSPServer *server;
GstRTSPMountPoints *mounts;
GstRTSPMediaFactory *factory;
gchar src[] = "nvv4l2camerasrc device=/dev/video0 ! video/x-raw(memory:NVMM), width=1920, height=1080, format=UYVY, framerate=60/1 ! "
" queue max-size-buffers=3 leaky=downstream ! "
" nvvidconv name=conv ! video/x-raw(memory:NVMM), width=1280, height=720, format=NV12, framerate=60/1 ! "
" nvv4l2h264enc control-rate=1 bitrate=8000000 preset-level=1 profile=0 disable-cabac=1 maxperf-enable=1 name=encoder insert-sps-pps=1 insert-vui=1 idrinterval=30 ! "
" appsink name=appsink sync=false max-buffers=3";
gchar sink[] = "( appsrc name=appsrc format=3 stream-type=0 is-live=true blocksize=2097152 max-bytes=200000 ! "
" queue max-size-buffers=3 leaky=no ! "
" rtph264pay config-interval=1 name=pay0 )";
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
/* Create pipeline with appsink */
g_print("Creating pipeline with appsink\n");
appsink_pipeline = gst_parse_launch (src, NULL);
if (appsink_pipeline == NULL) {
g_print ("Bad source\n");
g_main_loop_unref (loop);
return -1;
}
/* to be notified of messages from this pipeline, mostly EOS */
bus = gst_element_get_bus (appsink_pipeline);
gst_bus_add_watch (bus, (GstBusFunc) on_source_message, appsink_pipeline);
gst_object_unref (bus);
/* Create push_buffer callback for appsink */
g_print("Creating push buffer callback\n");
appsink = gst_bin_get_by_name (GST_BIN (appsink_pipeline), "appsink");
g_object_set (G_OBJECT (appsink), "emit-signals", TRUE, "sync", FALSE, NULL);
g_signal_connect (appsink, "new-sample",
G_CALLBACK (on_new_sample_from_sink), NULL);
/* Create rtsp server with pipeline starting with appsrc */
g_print("Creating rtsp server\n");
/* create a server instance */
server = gst_rtsp_server_new ();
/* get the mount points for this server, every server has a default object
* that be used to map uri mount points to media factories */
mounts = gst_rtsp_server_get_mount_points (server);
/* make a media factory for a test stream. The default media factory can use
* gst-launch syntax to create pipelines.
* any launch line works as long as it contains elements named pay%d. Each
* element with pay%d names will be a stream */
factory = gst_rtsp_media_factory_new ();
gst_rtsp_media_factory_set_launch (factory, sink);
gst_rtsp_media_factory_set_shared(factory, TRUE);
/* attach the test factory to the /test url */
gst_rtsp_mount_points_add_factory (mounts, "/test", factory);
/* don't need the ref to the mapper anymore */
g_object_unref (mounts);
/* attach the server to the default maincontext */
if (gst_rtsp_server_attach (server, NULL) == 0)
goto failed;
/* add a timeout for the session cleanup */
g_timeout_add_seconds (2, (GSourceFunc) timeout, server);
g_signal_connect (server, "client-connected",
G_CALLBACK (clientConnected), NULL);
/* Create media-constructed callback to get appsrc reference */
g_print("Creating media-constructed callback\n");
g_signal_connect (factory, "media-constructed", (GCallback) media_construct,
NULL);
g_signal_connect (factory, "media-configure", (GCallback) media_configure,
NULL);
/* Push buffers from appsink to appsrc */
/* start serving, this never stops */
g_print("Running main loop\n");
gst_element_set_state (appsink_pipeline, GST_STATE_PLAYING);
g_main_loop_run (loop);
gst_element_set_state (appsink_pipeline, GST_STATE_NULL);
return 0;
/* ERRORS */
failed:
{
g_print ("failed to attach the server\n");
return -1;
}
}
I will appreciate any idea about what can cause this behavior and how to solve it.
Thanks a lot!
This latency problem can have many causes, but most of the time it comes down to frames not being in sync, so a lot of data piles up in a queue.
To narrow down the real problem, test the following cases (a sketch for the first one follows the list):
Check the behavior with videotestsrc instead of the camera source.
Are you sure the queue after nvv4l2camerasrc is needed? What is the output if you skip the queue element?
You can also check with a lower-resolution input to learn something from it.
What happens if you use v4l2src instead of nvv4l2camerasrc, if your camera source is V4L2 compliant?
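For example, a quick way to run the first check is to swap the camera for videotestsrc in the source launch string. The sketch below is only a rough template: it keeps the NVIDIA-specific elements (nvvidconv, nvv4l2h264enc) and the appsink settings from the question, drops the extra queue, and the caps and encoder options may need adjusting for your platform.
/* Hypothetical test variant of the source pipeline: videotestsrc instead of
 * nvv4l2camerasrc, to rule out the capture path as the source of latency. */
gchar src_test[] =
    "videotestsrc is-live=true ! "
    "video/x-raw, width=1280, height=720, format=NV12, framerate=60/1 ! "
    "nvvidconv name=conv ! video/x-raw(memory:NVMM), width=1280, height=720, format=NV12, framerate=60/1 ! "
    "nvv4l2h264enc control-rate=1 bitrate=8000000 insert-sps-pps=1 insert-vui=1 idrinterval=30 name=encoder ! "
    "appsink name=appsink sync=false max-buffers=3";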
Thanks
Related
I am writing a media application to grab video frames from a video file. For this, I want to get the video properties before pulling samples from the pipeline, so I added a callback for the autoplug-select signal on the decoder and try to read the properties there. These callbacks are not getting called even after I put the pipeline into the PLAYING state, but they are called if I pull a sample from the pipeline using gst_app_sink_pull_sample.
Am I missing anything here? My understanding is that these callbacks are invoked when the pipeline is put into the PLAYING state.
#include <gst/gst.h>
#include <stdio.h>
static gboolean bus_callback (GstBus *bus, GstMessage *msg, gpointer data)
{
switch (GST_MESSAGE_TYPE (msg))
{
case GST_MESSAGE_ERROR: {
GError *err;
gchar *debug;
gst_message_parse_error (msg, &err, &debug);
g_print ("Error: %s\n", err->message);
g_error_free (err);
g_free (debug);
break;
}
default:
/* Unhandled message */
break;
}
return TRUE;
}
static void
on_pad_added (GstElement *element, GstPad *pad, gpointer data)
{
GstPad *sinkpad;
GstElement *decoder = (GstElement *) data;
/* We can now link this pad with the decoder sink pad */
sinkpad = gst_element_get_static_pad (decoder, "sink");
gst_pad_link (pad, sinkpad);
gst_object_unref (sinkpad);
}
static void
auto_plug_select (GstElement *decoder, GstPad *pad, GstCaps *caps,
GstElementFactory *factory, int *width )
{
const gchar *klass = gst_element_factory_get_klass (factory);
/* MW_customData *cdata = (MW_customData*) data;*/
GstCaps *scaps = gst_pad_query_caps (pad, NULL);
GstStructure *str = gst_caps_get_structure (scaps, 0);
const gchar *type = gst_structure_get_name (str);
printf (" Pad cap: %s\n", type);
if (g_strrstr(type,"video"))
{
gst_structure_get_int (str, "width", width);
printf(" Width: %d\n", *width);
}
}
int main (gint argc,
gchar *argv[])
{
GstElement *pipeline, *filesrc, *decoder, *fakesink;
GstBus *bus;
/* init GStreamer */
gst_init (&argc, &argv);
/* check args */
if (argc != 2) {
g_print ("Usage: %s <filename>\n", argv[0]);
return -1;
}
/* create a new pipeline to hold the elements */
pipeline = gst_pipeline_new ("pipeline");
/* Bus call back*/
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
gst_bus_add_watch (bus, bus_callback, NULL);
gst_object_unref (bus);
/* create file source and typefind element */
filesrc = gst_element_factory_make ("filesrc", "source");
g_object_set (G_OBJECT (filesrc), "location", argv[1], NULL);
decoder = gst_element_factory_make ("decodebin", NULL);
fakesink = gst_element_factory_make ("fakesink", "sink");
int width = 0;
/* Connect the sink pad when decoder completes the operation */
g_signal_connect (decoder, "pad-added", G_CALLBACK (on_pad_added), fakesink);
g_signal_connect (decoder, "autoplug-select", G_CALLBACK (auto_plug_select), &width);
/* setup */
gst_bin_add_many (GST_BIN (pipeline), filesrc, decoder, fakesink, NULL);
gst_element_link (filesrc, decoder);
gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);
printf(" Width: %d\n", width);
gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL);
return 0;
}
You do not give the pipeline any time to run. You probably stop it before data can trigger decodebin's callbacks.
As a cheap workaround, try:
gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);
g_usleep(100000000);
printf(" Width: %d\n", width);
gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL);
But it would be more correct to use a real GMainLoop and stop the pipeline again on a specific event.
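For instance, a minimal sketch of that approach, reusing the variable names from the question and assuming loop is made reachable from bus_callback (e.g. as a global or via the watch's user data) so it can be quit on ERROR or EOS:
/* Run a main loop so decodebin actually receives data and fires its
 * callbacks; quit the loop from the bus watch on ERROR or EOS. */
GMainLoop *loop = g_main_loop_new (NULL, FALSE);

gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);
g_main_loop_run (loop);            /* blocks until g_main_loop_quit (loop) */

printf (" Width: %d\n", width);
gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL);
g_main_loop_unref (loop);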
EDIT: P.S. Why not GstDiscoverer? https://gstreamer.freedesktop.org/documentation/pbutils/gstdiscoverer.html?gi-language=c
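For reference, a minimal GstDiscoverer sketch (synchronous API, error handling mostly omitted; the uri argument must be a full URI such as file:///path/to/file, and gst_init() must already have been called):
#include <gst/pbutils/pbutils.h>

/* Sketch: read the width of the first video stream without building a
 * pipeline by hand. get_video_width() is a hypothetical helper name. */
static guint
get_video_width (const gchar *uri)
{
  GError *err = NULL;
  guint width = 0;
  GstDiscoverer *disc = gst_discoverer_new (5 * GST_SECOND, &err);
  if (disc == NULL) {
    g_clear_error (&err);
    return 0;
  }
  GstDiscovererInfo *info = gst_discoverer_discover_uri (disc, uri, &err);
  GList *streams = info ? gst_discoverer_info_get_video_streams (info) : NULL;

  if (streams != NULL) {
    width = gst_discoverer_video_info_get_width (
        GST_DISCOVERER_VIDEO_INFO (streams->data));
    gst_discoverer_stream_info_list_free (streams);
  }
  if (info != NULL)
    g_object_unref (info);
  g_object_unref (disc);
  g_clear_error (&err);
  return width;
}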
The general goal is that I want to play an audio track on my RPi with aplay ("aplay example.mp3") and have the output audio looped back into a GStreamer program. This program then does a spectrum analysis.
I already got the spectrum analysis working on a static file with this code as the source:
data.source = gst_element_factory_make ("uridecodebin", "source");
g_object_set (data.source, "uri", "file:///home/pi/example.mp3", NULL);
Of course I want to use the overall output of my RPi as the source for the program, but I don't know how. I know I need to loop the audio back from the output to the input, and I found that snd-aloop looks promising. The problem is that I still don't know how to use it. I tried:
data.source = gst_element_factory_make ("alsasrc", "source");
g_object_set(data.source, "device", XXX ,NULL);
where XXX =
"alsa_output.platform-snd_aloop.0.analog-stereo.monitor"
"hw:1"
"hw:0"
Error -> Trying to dispose element sink, but it is in READY instead of the NULL state. You need to explicitly set Elements to the NULL state before dropping the final reference [...]
Bonus question: Is it possible to pipe audio into a GStreamer program? Something like: "aplay example.mp3 > gstreamerCprogram".
Here is the code:
#include <gst/gst.h>
#include <string.h> /* for strcmp() */
#define AUDIOFREQ 32000
/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
GstElement *pipeline;
GstElement *source;
GstElement *convert;
GstElement *sink;
} CustomData;
/* Handler for the pad-added signal */
static void pad_added_handler (GstElement *src, GstPad *pad, CustomData *data);
static gboolean message_handler (GstBus *bus, GstMessage *message, gpointer data){
if(message->type == GST_MESSAGE_EOS){
g_printerr("EOS\n");
}
if(message->type == GST_MESSAGE_ELEMENT){
const GstStructure *s = gst_message_get_structure (message);
const gchar *name = gst_structure_get_name(s);
if(strcmp(name, "spectrum") == 0){
const GValue *magnitudes;
gdouble freq;
magnitudes = gst_structure_get_value (s,"magnitude");
int i = 0;
for(i = 0; i < 20; ++i){
freq = (gdouble)((32000/2) * i + 32000 / 4 / 20);
if(freq > 10000){
g_printerr("%f\n",freq);
}else{
g_printerr("|");
}
}
}
}
return TRUE;
}
int main(int argc, char *argv[]) {
CustomData data;
GstCaps *caps;
GstElement *spectrum;
GstBus *bus;
GstMessage *msg;
GstStateChangeReturn ret;
gboolean terminate = FALSE;
/* Initialize GStreamer */
gst_init (&argc, &argv);
//____________________________HERE IS THE PROBLEM________________________
//data.source = gst_element_factory_make ("uridecodebin", "source");
//g_object_set (data.source, "uri", "file:///home/pi/example.mp3", NULL);
data.source = gst_element_factory_make ("alsasrc", "source");
g_object_set(data.source, "device", "alsa_output.platform-snd_aloop.0.analog-stereo.monitor",NULL);
//____________________________HERE ENDS THE PROBLEM________________________
data.convert = gst_element_factory_make ("audioconvert", "convert");
data.sink = gst_element_factory_make ("autoaudiosink", "sink");
spectrum = gst_element_factory_make ("spectrum", "spectrum");
caps = gst_caps_new_simple ("audio/x-raw", "rate",G_TYPE_INT, AUDIOFREQ, NULL);
//SET SOME VARIABLES ON SPECTRUM
g_object_set (G_OBJECT (spectrum), "bands", 20, "post-messages", TRUE, "message-phase", TRUE, NULL);
/* Create the empty pipeline */
data.pipeline = gst_pipeline_new ("test-pipeline");
if (!data.pipeline || !data.source || !data.convert || !data.sink || !caps || !spectrum) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Build the pipeline. Note that we are NOT linking the source at this
* point. We will do it later. */
gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.convert , spectrum,data.sink, NULL);
if (!gst_element_link_many (data.convert, spectrum, data.sink, NULL)) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (data.pipeline);
return -1;
}
/* Connect to the pad-added signal */
g_signal_connect (data.source, "pad-added", G_CALLBACK (pad_added_handler), &data);
/* Start playing */
ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (data.pipeline);
return -1;
}
GMainLoop *loop;
/* Listen to the bus */
bus = gst_element_get_bus (data.pipeline);
gst_bus_add_watch(bus, message_handler, NULL);
loop = g_main_loop_new (NULL,FALSE);
g_main_loop_run(loop);
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (data.pipeline, GST_STATE_NULL);
gst_object_unref (data.pipeline);
return 0;
}
/* This function will be called by the pad-added signal */
static void pad_added_handler (GstElement *src, GstPad *new_pad, CustomData *data) {
GstPad *sink_pad = gst_element_get_static_pad (data->convert, "sink");
GstPadLinkReturn ret;
GstCaps *new_pad_caps = NULL;
GstStructure *new_pad_struct = NULL;
const gchar *new_pad_type = NULL;
g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad), GST_ELEMENT_NAME (src));
/* If our converter is already linked, we have nothing to do here */
if (gst_pad_is_linked (sink_pad)) {
g_print (" We are already linked. Ignoring.\n");
goto exit;
}
/* Check the new pad's type */
new_pad_caps = gst_pad_query_caps (new_pad, NULL);
new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
new_pad_type = gst_structure_get_name (new_pad_struct);
if (!g_str_has_prefix (new_pad_type, "audio/x-raw")) {
g_print (" It has type '%s' which is not raw audio. Ignoring.\n", new_pad_type);
goto exit;
}
/* Attempt the link */
ret = gst_pad_link (new_pad, sink_pad);
if (GST_PAD_LINK_FAILED (ret)) {
g_print (" Type is '%s' but link failed.\n", new_pad_type);
} else {
g_print (" Link succeeded (type '%s').\n", new_pad_type);
}
exit:
/* Unreference the new pad's caps, if we got them */
if (new_pad_caps != NULL)
gst_caps_unref (new_pad_caps);
/* Unreference the sink pad */
gst_object_unref (sink_pad);
}
I'm trying to implement the following pipeline in C:
arif#dev:~/GS_samples/cmd_GS$gst-launch-0.10 filesrc location="../sample_media/M1F1-Alaw-AFsp.wav" ! wavparse ! alawdec ! autoaudiosink
Here is the implementation I have written:
#include <gst/gst.h>
void on_pad_added(GstElement *src_element, GstPad *src_pad, gpointer data);
static gboolean bus_cb(GstBus *bus, GstMessage *message, gpointer data);
static GMainLoop *loop;
int main(int argc, char **argv) {
GstElement *pipeline;
GstElement *src;
GstElement *dec;
GstElement *parse;
GstElement *sink;
GstBus *bus;
gst_init(&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
pipeline = gst_pipeline_new("wav_player");
src = gst_element_factory_make("filesrc","src");
sink = gst_element_factory_make("autoaudiosink","sink");
parse = gst_element_factory_make("wavparse","parse");
dec = gst_element_factory_make("alawdec", "dec");
gst_bin_add_many (GST_BIN(pipeline), src,parse,dec,sink, NULL);
g_object_set( G_OBJECT (src) , "location",argv[1], NULL);
gst_element_link(src,parse);
gst_element_link(dec,sink);
g_signal_connect (dec, "pad-added", G_CALLBACK (on_pad_added), dec);
bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
gst_bus_add_watch (bus, bus_cb, NULL);
gst_object_unref(bus);
gst_element_set_state(pipeline, GST_STATE_PLAYING);
g_main_loop_run(loop);
return 0;
}
void on_pad_added (GstElement *src_element, GstPad *src_pad, gpointer data)
{
g_print ("linking dynamic pad ...\n");
GstElement *sink_element = (GstElement *) data;
GstPad *sink_pad = gst_element_get_static_pad(sink_element, "sink");
gst_pad_link (src_pad, sink_pad);
gst_object_unref(sink_pad);
}
static gboolean bus_cb(GstBus *bus, GstMessage *message, gpointer data)
{
g_print ("Got %s message\n", GST_MESSAGE_TYPE_NAME (message));
switch (GST_MESSAGE_TYPE (message)) {
case GST_MESSAGE_ERROR: {
GError *err;
gchar *debug;
gst_message_parse_error (message, &err, &debug);
g_print ("Error: %s\n", err->message);
g_error_free (err);
g_free (debug);
g_main_loop_quit (loop);
break;
}
case GST_MESSAGE_EOS:
/* end-of-stream */
g_main_loop_quit (loop);
break;
default:
/* unhandled message */
break;
}
/* we want to be notified again the next time there is a message
* on the bus, so returning TRUE (FALSE means we want to stop watching
* for messages on the bus and our callback should not be called again)
*/
return TRUE;
}
But this does not work:
arif#dev:~/GS_samples/cmd_GS$./a.out ../sample_media/M1F1-Alaw-AFsp.wav
Got state-changed message
Got state-changed message
Got stream-status message
Got tag message
Got error message
Error: Internal data flow error.
The problem is in these lines:
gst_element_link(src,parse);
gst_element_link(dec,sink);
You are trying to build one pipeline that performs one task for you, but you are not linking the elements properly. You should use: gst_element_link_many(src, parse, dec, sink, NULL)
Pay attention that the order of these elements is important; the output of one is the input of the next.
EDIT: You also have two other problems, which I just fixed; with these changes it works (a consolidated sketch follows the snippet below):
Why are you using a decoder? You are already parsing your .wav file; drop the decoder element and let the data flow on to the audio sink to be played.
You are not checking whether the pipeline actually reaches the PLAYING state. Add this block of code to set your pipeline to the PLAYING state and verify it:
GstStateChangeReturn ret;
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (pipeline);
return -1;
}
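Putting the answer's points together, the relevant part of main() might look like the sketch below (variable names follow the question; drop dec from the chain if you also drop the alawdec element as suggested above, and if the static link call fails for your wavparse version, fall back to linking in a pad-added handler as in the question):
/* Consolidated sketch of the answer's suggestions: link the whole chain in
 * order and verify both the linking and the state change. */
gst_bin_add_many (GST_BIN (pipeline), src, parse, dec, sink, NULL);
g_object_set (G_OBJECT (src), "location", argv[1], NULL);

if (!gst_element_link_many (src, parse, dec, sink, NULL)) {
  g_printerr ("Elements could not be linked.\n");
  gst_object_unref (pipeline);
  return -1;
}

GstStateChangeReturn ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
  g_printerr ("Unable to set the pipeline to the playing state.\n");
  gst_object_unref (pipeline);
  return -1;
}
g_main_loop_run (loop);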
I am trying to learn how to use dynamic pads in GStreamer, so I added a pad-added signal handler so that I get a message once the pad is created. However, I don't get any message.
Here is the code:
#include <gst/gst.h>
static void
cb_new_pad (GstElement *element,
GstPad *pad,
gpointer data)
{
gchar *name;
name = gst_pad_get_name (pad);
g_print ("A new pad %s was created\n", name);
g_free (name);
/* here, you would setup a new pad link for the newly created pad */
}
int
main (int argc,
char *argv[])
{
GstElement *pipeline, *source, *demux;
GMainLoop *loop;
/* init */
gst_init (&argc, &argv);
/* create elements */
pipeline = gst_pipeline_new ("my_pipeline");
source = gst_element_factory_make ("filesrc", "source");
g_object_set (source, "location", argv[1], NULL);
demux = gst_element_factory_make ("oggdemux", "demuxer");
/* put together a pipeline */
gst_bin_add_many (GST_BIN (pipeline), source, demux, NULL);
gst_element_link_pads (source, "src", demux, "sink");
/* listen for newly created pads */
g_signal_connect (demux, "pad-added", G_CALLBACK (cb_new_pad), NULL);
/* start the pipeline */
gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);
loop = g_main_loop_new (NULL, FALSE);
g_main_loop_run (loop);
}
So what is the problem? (By the way, I am using GStreamer 1.2.1.)
Your code worked fine for me.
Your demuxer probably could not demultiplex the stream; check the input file that you are providing. It is probably not a valid Ogg file.
On a related note, do add debugging code to your program, i.e. listen to the bus for messages (see the sketch below). It helps a lot when something doesn't work.
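For example, a minimal bus watch could look like this (a sketch reusing the loop from the question; bus_watch_cb is a hypothetical helper name, and the watch should be installed before g_main_loop_run()):
/* Sketch: print ERROR and EOS messages from the pipeline bus so problems
 * such as an unreadable input file become visible, then quit the loop. */
static gboolean
bus_watch_cb (GstBus *bus, GstMessage *msg, gpointer user_data)
{
  GMainLoop *loop = user_data;

  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_ERROR: {
      GError *err = NULL;
      gchar *dbg = NULL;
      gst_message_parse_error (msg, &err, &dbg);
      g_printerr ("ERROR: %s (%s)\n", err->message, dbg ? dbg : "no debug info");
      g_error_free (err);
      g_free (dbg);
      g_main_loop_quit (loop);
      break;
    }
    case GST_MESSAGE_EOS:
      g_print ("End of stream\n");
      g_main_loop_quit (loop);
      break;
    default:
      break;
  }
  return TRUE;
}

/* In main(), before running the loop:
 *   GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
 *   gst_bus_add_watch (bus, bus_watch_cb, loop);
 *   gst_object_unref (bus);
 */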
Basic tutorial 3 of the GStreamer SDK is a perfect example of what you're trying to do.
My scenario is as follows:
I have set up an RTSP server at IP 192.168.1.24 on port 554. I use the following gst-launch command on the client side to receive packets, and everything works fine.
gst-launch rtspsrc location = rtsp://admin:admin123#192.168.1.24:554/axis-media/media.amp ! fakesink
But when I implement the same thing via C code, it gives me an error. My C code is as follows:
#include <gst/gst.h>
#include <glib.h>
static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
GMainLoop *loop = (GMainLoop *) data;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_EOS:
g_print ("End of stream\n");
g_main_loop_quit (loop);
break;
case GST_MESSAGE_ERROR: {
gchar *debug;
GError *error;
gst_message_parse_error (msg, &error, &debug);
g_free (debug);
g_printerr ("Error: %s\n", error->message);
g_error_free (error);
g_main_loop_quit (loop);
break;
}
default:
break;
}
return TRUE;
}
int main (int argc, char *argv[])
{
GMainLoop *loop;
GstElement *pipeline, *source, *sink;
GstBus *bus;
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
if (argc != 2) {
return -1;
}
pipeline = gst_pipeline_new ("network-player");
source = gst_element_factory_make ("rtspsrc","file-source");
sink = gst_element_factory_make ("fakesink","fake");
if (!pipeline || !source || !sink) {
g_printerr ("One element could not be created. Exiting.\n");
return -1;
}
g_object_set (G_OBJECT (source), "location", argv[1], NULL);
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
gst_bin_add_many (GST_BIN (pipeline),source, sink, NULL);
gst_element_link_many (source, sink, NULL);
/* Set the pipeline to "playing" state*/
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* Iterate */
g_print ("Running...\n");
g_main_loop_run (loop);
/* Out of the main loop, clean up nicely */
g_print ("Returned, stopping playback\n");
gst_element_set_state (pipeline, GST_STATE_NULL);
g_print ("Deleting pipeline\n");
gst_object_unref (GST_OBJECT (pipeline));
return 0;
}
I am able to compile the code without any error.
But when I run the generated binary as follows:
user#user:~ ./helloworld rtsp://admin:admin123#192.168.1.24:554/axis-media/media.amp
I get the following error:
Now playing: rtsp://root:nlss123#192.168.1.24:554/axis-media/media.amp
Running...
Error: Internal data flow error.
Returned, stopping playback
Deleting pipeline
Can anyone suggest why there is an internal data flow error?
I also had the same problem.
You should link the source to the sink with the "pad-added" signal.
In brief:
typedef struct myDataTag {
GstElement *pipeline;
GstElement *rtspsrc;
GstElement *depayloader;
GstElement *decoder;
GstElement *sink;
} myData_t;
myData_t appData;
appData.pipeline = gst_pipeline_new ("videoclient");
appData.rtspsrc = gst_element_factory_make ("rtspsrc", "rtspsrc");
g_object_set (G_OBJECT (appData.rtspsrc), "location", "rtsp://192.168.1.10:554/myStreamPath", NULL);
appData.depayloader = gst_element_factory_make ("rtph264depay","depayloader");
appData.decoder = gst_element_factory_make ("h264dec", "decoder");
appData.sink = gst_element_factory_make ("autovideosink", "sink");
//then add all elements together
gst_bin_add_many (GST_BIN (appData.pipeline), appData.rtspsrc, appData.depayloader, appData.decoder, appData.sink, NULL);
//link everything after the source
gst_element_link_many (appData.depayloader, appData.decoder, appData.sink, NULL);
/*
* Connect to the pad-added signal for the rtpbin. This allows us to link
* the dynamic RTP source pad to the depayloader when it is created.
*/
g_signal_connect (appData->rtspsrc, "pad-added", G_CALLBACK (pad_added_handler), &appData);
/* Set the pipeline to "playing" state*/
gst_element_set_state (appData.pipeline, GST_STATE_PLAYING);
/* pad added handler */
static void pad_added_handler (GstElement *src, GstPad *new_pad, myData_t *pThis) {
GstPad *sink_pad = gst_element_get_static_pad (pThis->depayloader, "sink");
GstPadLinkReturn ret;
GstCaps *new_pad_caps = NULL;
GstStructure *new_pad_struct = NULL;
const gchar *new_pad_type = NULL;
g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad), GST_ELEMENT_NAME (src));
/* Check the new pad's name */
if (!g_str_has_prefix (GST_PAD_NAME (new_pad), "recv_rtp_src_")) {
g_print (" It is not the right pad. Need recv_rtp_src_. Ignoring.\n");
goto exit;
}
/* If our converter is already linked, we have nothing to do here */
if (gst_pad_is_linked (sink_pad)) {
g_print (" Sink pad from %s already linked. Ignoring.\n", GST_ELEMENT_NAME (src));
goto exit;
}
/* Check the new pad's type */
new_pad_caps = gst_pad_get_caps (new_pad);
new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
new_pad_type = gst_structure_get_name (new_pad_struct);
/* Attempt the link */
ret = gst_pad_link (new_pad, sink_pad);
if (GST_PAD_LINK_FAILED (ret)) {
g_print (" Type is '%s' but link failed.\n", new_pad_type);
} else {
g_print (" Link succeeded (type '%s').\n", new_pad_type);
}
exit:
/* Unreference the new pad's caps, if we got them */
if (new_pad_caps != NULL)
gst_caps_unref (new_pad_caps);
/* Unreference the sink pad */
gst_object_unref (sink_pad);
}
Hope this helps someone. :)
You can get verbose error logs by running the app with --gst-debug=*rtsp*:5, e.g.
./yourApplication --gst-debug=*rtsp*:5