I am trying to stream a HLS Stream using gstreamer 1.0. I have a filter which works
gst-launch-1.0 -v souphttpsrc location="http://devimages.apple.com/iphone/samples/bipbop/bipbopall.m3u8" ! hlsdemux ! tsdemux ! h264parse ! avdec_h264 ! autovideosink
But when I try to convert this into c code, it fails.
int main(int argc, char* argv[])
{
GMainLoop *loop;
GstElement *pipeline, *source, *demuxer, *tsdemux, *h264parse, *vdecoder, *vsink;
GstElement *aacparse, *adecoder, *aconvert, *asink;
GstBus *bus;
int bus_watch_id;
gst_debug_set_default_threshold(3);
gst_init(&argc, &argv);
loop = g_main_loop_new(NULL, FALSE);
if (argc != 2)
{
g_printerr("Usage: %s <http stream source>\n", argv[0]);
return -1;
}
pipeline = gst_pipeline_new("myApp");
source = gst_element_factory_make("souphttpsrc", "http-src");
demuxer = gst_element_factory_make("hlsdemux", "hls-demuxer");
tsdemux = gst_element_factory_make("tsdemux", "ts-demuxer");
h264parse = gst_element_factory_make("h264parse", "h264parse");
vdecoder = gst_element_factory_make("avdec_h264", "h264decoder");
vsink = gst_element_factory_make("autovideosink", "videosink");
/* set the input url to the source element */
g_object_set(G_OBJECT(source), "location", argv[1], NULL);
bus = gst_pipeline_get_bus(GST_PIPELINE (pipeline));
bus_watch_id = gst_bus_add_watch(bus, bus_call, loop);
gst_object_unref(bus);
/* add elements into the pipeline */ //next aacparse
gst_bin_add_many(GST_BIN (pipeline), source, demuxer, tsdemux, h264parse, vdecoder,vsink, NULL);
gst_element_link(source, demuxer);
// this was wrong
/*gst_element_link_many(tsdemux, h264parse, vdecoder, vsink, NULL);*/
/* connect demuxer and decoder on pad added */
/*g_signal_connect(demuxer, "pad-added", G_CALLBACK(on_pad_added), vdecoder);*/
// Correct Implementation
gst_element_link_many(h264parse, vdecoder, vsink, NULL);
g_signal_connect(demuxer, "pad-added", G_CALLBACK(on_pad_added), tsdemux);
g_signal_connect(tsdemux, "pad-added", G_CALLBACK(on_pad_added), h264parse);
g_signal_connect(demuxer, "pad-added", G_CALLBACK(on_pad_added), vdecoder);
// Correct Implementation
g_print ("Starting play: %s\n", argv[1]);
gst_element_set_state(pipeline, GST_STATE_PLAYING);
g_print ("Running\n");
g_main_loop_run(loop);
/* Clean up after execution of main loop */
g_print ("Stopping Playback: %s\n", argv[1]);
gst_element_set_state(pipeline, GST_STATE_NULL);
g_print ("Quitting\n");
g_object_unref(G_OBJECT(pipeline));
g_source_remove(bus_watch_id);
g_main_loop_unref(loop);
return 0;
}
I compile the code using:
cc my_app.c -o my_app $(pkg-config --cflags --libs gstreamer-1.0)
And launch the application using:
./my_app http://devimages.apple.com/iphone/samples/bipbop/bipbopall.m3u8
I always get the following error:
hlsdemux gsthlsdemux.c:792:gst_hls_demux_stream_loop:<hls-demuxer> error: stream stopped, reason not-negotiated
Do I have to do anything differently?
// this was wrong
gst_element_link(source, demuxer);
// Correct Implementation
gst_element_link_many(source, demuxer, NULL);
Where is your `void on_pad_added (GstElement *element, GstPad *pad, gpointer data);` function? And take a look at the linked example here.
I was able to get it working. The problem was "tsdemux" and "demuxer" have to connect at run time in "on_pad_added" method.
Related
I'm having pipeline with appsink which pushes samples to appsrc which acts as a source to pipeline created by rtsp server. It works, I can connect to rtsp server and see the streamed video. The problem is latency. For some reason a lot of buffers is queued in the appsrc and the viewed stream has latency of more than two seconds.
I tried to find the source of latency and it looks like the data are started to being read from appsrc source pad after some time from the point the pipeline is started. The delay between the point the pipeline is started and the point data start to be read out from appsrc source pad is then transformed to it's latency.
I found this by reading out how many bytes is queued in appsrc each time I push the buffer to it. This value which I read out is continuously rising for some time. When the read out of data starts the current amout of the bytes stored in appsrc queue stay approximately the same for the rest of the time I stream the video.
Here is my test application which I'm using to test the correct functionality of this design.
#include <stdio.h>
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <gst/app/gstappsink.h>
#include <time.h>
#include <gst/rtsp-server/rtsp-server.h>
GMainLoop *loop;
GstElement *appsink;
GstElement *appsrc;
GstElement *appsink_pipeline;
/* Functions below print the Capabilities in a human-friendly format */
/* gst_structure_foreach() callback: print one caps field as
 * "<pfx> <name>: <serialized value>". Always returns TRUE so the
 * iteration visits every field. */
static gboolean print_field (GQuark field, const GValue * value, gpointer pfx) {
    const gchar *field_name = g_quark_to_string (field);
    gchar *serialized = gst_value_serialize (value);

    g_print ("%s %15s: %s\n", (gchar *) pfx, field_name, serialized);
    g_free (serialized);
    return TRUE;
}
/* Dump a GstCaps in human-readable form: the special ANY/EMPTY cases get a
 * single line, otherwise each structure is printed with its fields, every
 * line prefixed with pfx. */
static void print_caps (const GstCaps * caps, const gchar * pfx) {
    guint idx, count;

    g_return_if_fail (caps != NULL);

    if (gst_caps_is_any (caps)) {
        g_print ("%sANY\n", pfx);
        return;
    }
    if (gst_caps_is_empty (caps)) {
        g_print ("%sEMPTY\n", pfx);
        return;
    }

    count = gst_caps_get_size (caps);
    for (idx = 0; idx < count; ++idx) {
        GstStructure *entry = gst_caps_get_structure (caps, idx);

        g_print ("%s%s\n", pfx, gst_structure_get_name (entry));
        gst_structure_foreach (entry, print_field, (gpointer) pfx);
    }
}
/* called when the appsink notifies us that there is a new buffer ready for
* processing */
/* appsink "new-sample" callback: pull the sample and forward it to the RTSP
 * pipeline's appsrc. Logs how many bytes appsrc currently has queued, which
 * is the metric used to chase the latency problem. */
static GstFlowReturn
on_new_sample_from_sink (GstElement * elt, void * data)
{
  GstSample *sample;
  GstFlowReturn ret = GST_FLOW_OK;
  guint64 bytes;

  /* get the sample from appsink */
  sample = gst_app_sink_pull_sample (GST_APP_SINK (elt));
  if (sample == NULL)           /* BUGFIX: appsink flushing/EOS returns NULL;
                                 * unreffing NULL is invalid */
    return GST_FLOW_EOS;

  if (appsrc) {
    bytes = gst_app_src_get_current_level_bytes (GST_APP_SRC (appsrc));
    /* %lu is wrong for guint64 on 32-bit targets; use the glib macro */
    g_print ("buffered bytes before push %" G_GUINT64_FORMAT "\n", bytes);
    /* push_sample does not take ownership of the sample; unref below */
    ret = gst_app_src_push_sample (GST_APP_SRC (appsrc), sample);
  }
  gst_sample_unref (sample);
  return ret;
}
/* called when we get a GstMessage from the source pipeline when we get EOS, we
* notify the appsrc of it. */
/* Bus watch for the capture pipeline: propagate EOS into the appsrc, quit
 * the main loop on error, and log buffering progress. Returning TRUE keeps
 * the watch installed. */
static gboolean
on_source_message (GstBus * bus, GstMessage * message, void * data)
{
  g_print ("%s\n", __func__);

  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_EOS: {
      g_print ("The source got dry\n");
      gst_app_src_end_of_stream (GST_APP_SRC (appsrc));
      break;
    }
    case GST_MESSAGE_ERROR: {
      g_print ("Received error\n");
      g_main_loop_quit (loop);
      break;
    }
    case GST_MESSAGE_BUFFERING: {
      gint percent = 0;
      gst_message_parse_buffering (message, &percent);
      g_print ("Buffering = %d\n", percent);
      break;
    }
    default:
      break;
  }
  return TRUE;
}
/* appsrc "need-data" handler. The signal's documented signature is
 *   void user_function (GstElement *src, guint length, gpointer udata)
 * so the handler must return void (BUGFIX: it previously returned
 * GstFlowReturn). Data is pushed from the appsink callback instead, so
 * there is nothing to do here but trace the request. */
static void need_data (GstElement * appsrc_loc,
    guint length,
    gpointer udata)
{
  g_print ("Need data\n");
}
/* this timeout is periodically run to clean up the expired sessions from the
* pool. This needs to be run explicitly currently but might be done
* automatically as part of the mainloop. */
/* Periodic housekeeping driven from a GLib timeout: remove expired sessions
 * from the server's session pool. This currently has to be run explicitly
 * but might become part of the mainloop. Returns TRUE to stay scheduled. */
static gboolean
timeout (GstRTSPServer * server)
{
  GstRTSPSessionPool *session_pool = gst_rtsp_server_get_session_pool (server);

  gst_rtsp_session_pool_cleanup (session_pool);
  g_object_unref (session_pool);
  return TRUE;
}
/* "client-connected" signal handler: only traces that an RTSP client
 * reached the server; the client object itself is not used. */
void clientConnected(GstRTSPServer* server, GstRTSPClient* client, gpointer user)
{
g_print("%s\n", __func__);
}
/* GstRTSPMedia "new-state" handler: logs the media's new pipeline state as
 * a raw integer.
 * NOTE(review): the signal delivers (media, state, user_data); the trailing
 * gpointer parameter is omitted here. That works through G_CALLBACK's cast
 * on common ABIs, but confirm against the gst-rtsp-server docs. */
static void media_state_cb(GstRTSPMedia *media, GstState state)
{
g_print("media state = %d\n", state);
}
/* "media-constructed" handler: locate the appsrc inside the media's
 * pipeline (stored in the global 'appsrc') and connect the need-data and
 * new-state signals.
 * NOTE(review): gst_bin_get_by_name_recurse_up() returns a new reference
 * that is never released, and any previous value of the global 'appsrc' is
 * overwritten without an unref - worth fixing if media can be constructed
 * more than once. */
static void
media_construct (GstRTSPMediaFactory * factory, GstRTSPMedia * media,
gpointer user_data)
{
GstElement *element;
g_print("%s\n", __func__);
/* get the element used for providing the streams of the media */
element = gst_rtsp_media_get_element (media);
/* get our appsrc, we named it 'appsrc' with the name property */
appsrc = gst_bin_get_by_name_recurse_up (GST_BIN (element), "appsrc");
g_signal_connect (appsrc, "need-data",
G_CALLBACK (need_data), NULL);
g_signal_connect (media, "new-state",
G_CALLBACK (media_state_cb), NULL);
gst_object_unref (element);
}
/* "media-configure" handler: copy the caps currently negotiated on the
 * capture pipeline's appsink over to the RTSP pipeline's appsrc, so both
 * sides agree on the stream format, then switch appsrc to time format. */
static void
media_configure (GstRTSPMediaFactory * factory, GstRTSPMedia * media,
    gpointer user_data)
{
  GstPad *pad;
  GstCaps *caps;
  gchar *caps_str;
  GstElement *element;

  g_print ("%s\n", __func__);

  /* get the element used for providing the streams of the media */
  element = gst_rtsp_media_get_element (media);
  /* get our appsrc, we named it 'appsrc' with the name property */
  appsrc = gst_bin_get_by_name_recurse_up (GST_BIN (element), "appsrc");

  pad = gst_element_get_static_pad (appsink, "sink");
  if (pad) {
    g_print ("Got pad\n");
    caps = gst_pad_get_current_caps (pad);
    if (caps) {
      caps_str = gst_caps_to_string (caps);
      g_print ("Got caps %s\n", caps_str);
      g_free (caps_str);              /* BUGFIX: string was leaked */
      g_object_set (G_OBJECT (appsrc), "caps", caps, NULL);
      gst_caps_unref (caps);
    }
    gst_object_unref (pad);           /* BUGFIX: static-pad ref was leaked */
  }

  /* this instructs appsrc that we will be dealing with timed buffers */
  gst_util_set_object_arg (G_OBJECT (appsrc), "format", "time");
  gst_object_unref (element);
}
/*
 * Demo entry point: builds a capture pipeline (camera -> H.264 encoder ->
 * appsink) and an RTSP server whose per-client media pipeline starts with
 * an appsrc; samples are bridged appsink -> appsrc in
 * on_new_sample_from_sink().
 */
int main (int argc, char *argv[]){
GstBus *bus;
GstRTSPServer *server;
GstRTSPMountPoints *mounts;
GstRTSPMediaFactory *factory;
/* Capture side: NVMM camera -> queue -> nvvidconv -> H.264 encoder -> appsink. */
gchar src[] = "nvv4l2camerasrc device=/dev/video0 ! video/x-raw(memory:NVMM), width=1920, height=1080, format=UYVY, framerate=60/1 ! "
" queue max-size-buffers=3 leaky=downstream ! "
" nvvidconv name=conv ! video/x-raw(memory:NVMM), width=1280, height=720, format=NV12, framerate=60/1 ! "
" nvv4l2h264enc control-rate=1 bitrate=8000000 preset-level=1 profile=0 disable-cabac=1 maxperf-enable=1 name=encoder insert-sps-pps=1 insert-vui=1 idrinterval=30 ! "
" appsink name=appsink sync=false max-buffers=3";
/* RTSP media side: appsrc -> queue -> RTP payloader named pay0. */
gchar sink[] = "( appsrc name=appsrc format=3 stream-type=0 is-live=true blocksize=2097152 max-bytes=200000 ! "
" queue max-size-buffers=3 leaky=no ! "
" rtph264pay config-interval=1 name=pay0 )";
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
/* Create pipeline with appsink */
g_print("Creating pipeline with appsink\n");
appsink_pipeline = gst_parse_launch (src, NULL);
if (appsink_pipeline == NULL) {
g_print ("Bad source\n");
g_main_loop_unref (loop);
return -1;
}
/* to be notified of messages from this pipeline, mostly EOS */
bus = gst_element_get_bus (appsink_pipeline);
gst_bus_add_watch (bus, (GstBusFunc) on_source_message, appsink_pipeline);
gst_object_unref (bus);
/* Create push_buffer callback for appsink */
g_print("Creating push buffer callback\n");
appsink = gst_bin_get_by_name (GST_BIN (appsink_pipeline), "appsink");
/* emit-signals must be enabled or "new-sample" is never fired */
g_object_set (G_OBJECT (appsink), "emit-signals", TRUE, "sync", FALSE, NULL);
g_signal_connect (appsink, "new-sample",
G_CALLBACK (on_new_sample_from_sink), NULL);
/* Create rtsp server with pipeline starting with appsrc */
g_print("Creating rtsp server\n");
/* create a server instance */
server = gst_rtsp_server_new ();
/* get the mount points for this server, every server has a default object
* that be used to map uri mount points to media factories */
mounts = gst_rtsp_server_get_mount_points (server);
/* make a media factory for a test stream. The default media factory can use
* gst-launch syntax to create pipelines.
* any launch line works as long as it contains elements named pay%d. Each
* element with pay%d names will be a stream */
factory = gst_rtsp_media_factory_new ();
gst_rtsp_media_factory_set_launch (factory, sink);
/* shared: all clients get the same (single) media pipeline */
gst_rtsp_media_factory_set_shared(factory, TRUE);
/* attach the test factory to the /test url */
gst_rtsp_mount_points_add_factory (mounts, "/test", factory);
/* don't need the ref to the mapper anymore */
g_object_unref (mounts);
/* attach the server to the default maincontext */
if (gst_rtsp_server_attach (server, NULL) == 0)
goto failed;
/* add a timeout for the session cleanup */
g_timeout_add_seconds (2, (GSourceFunc) timeout, server);
g_signal_connect (server, "client-connected",
G_CALLBACK (clientConnected), NULL);
/* Create media-constructed callback to get appsrc reference */
g_print("Creating media-constructed callback\n");
g_signal_connect (factory, "media-constructed", (GCallback) media_construct,
NULL);
g_signal_connect (factory, "media-configure", (GCallback) media_configure,
NULL);
/* Push buffers from appsink to appsrc */
/* start serving, this never stops */
g_print("Running main loop\n");
gst_element_set_state (appsink_pipeline, GST_STATE_PLAYING);
g_main_loop_run (loop);
gst_element_set_state (appsink_pipeline, GST_STATE_NULL);
/* NOTE(review): appsink_pipeline, appsink and loop references are not
* released on this path - acceptable for a demo, worth freeing otherwise. */
return 0;
/* ERRORS */
failed:
{
g_print ("failed to attach the server\n");
return -1;
}
}
I will appreciate every idea about what can cause this behavior and how to solve this.
Thanks a lot!
This latency problem may be due to many reasons but most of the time this problem is due to frames are not in SYNC. There is a lot of data in the queue.
To counter this problem need to test these test cases to find out the real problem.
Check the behavior with videotestsrc instead of the camera source.
Are you sure that the queue after nvv4l2camerasrc is needed? What is the output if you skip the queue element?
You can also check with lower resolution input to get something from it.
What happens if you use v4l2src instead of nvv4l2camerasrc, if your camera source is V4L2-compliant?
Thanks
I am writing a media application to grab video frames from a video file. For this, I want to get the video properties before pulling the samples from the pipeline. So, I have added a callback for auto-plug signal at the decoder and trying to get the properties. These callbacks are not getting called even after I put the pipeline into playing state, but these are called if I try to pull a sample from the pipeline using gst_app_sink_pull_sample.
Am I missing anything here? My understanding is that these callbacks will get invoked when we put the pipeline into playing state.
#include <gst/gst.h>
#include <stdio.h>
/* Bus watch: report pipeline errors on stdout; every other message type is
 * ignored on purpose. */
static void bus_callback (GstBus *bus, GstMessage *msg, gpointer data)
{
  if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ERROR) {
    GError *err = NULL;
    gchar *debug = NULL;

    gst_message_parse_error (msg, &err, &debug);
    g_print ("Error: %s\n", err->message);
    g_error_free (err);
    g_free (debug);
  }
  /* Unhandled message types fall through silently. */
}
/* decodebin "pad-added" handler: link the freshly exposed source pad to the
 * static "sink" pad of the downstream element passed as user data. */
static void
on_pad_added (GstElement *element, GstPad *pad, gpointer data)
{
  GstPad *sinkpad;
  GstElement *decoder = (GstElement *) data;

  /* We can now link this pad with the downstream element's sink pad */
  sinkpad = gst_element_get_static_pad (decoder, "sink");
  if (sinkpad == NULL) {
    g_printerr ("Downstream element has no static \"sink\" pad\n");
    return;
  }
  /* BUGFIX: the link result was silently discarded before */
  if (GST_PAD_LINK_FAILED (gst_pad_link (pad, sinkpad)))
    g_printerr ("Failed to link dynamic pad\n");
  gst_object_unref (sinkpad);
}
/* decodebin "autoplug-select" handler: inspect the pad caps and, for video
 * streams, store the width into *width.
 * NOTE(review): the real autoplug-select signal expects a
 * GstAutoplugSelectResult return value; a void handler relies on whatever
 * is left in the return register - confirm against the decodebin docs. */
static void
auto_plug_select (GstElement *decoder, GstPad *pad, GstCaps *caps,
    GstElementFactory *factory, int *width)
{
  const gchar *klass = gst_element_factory_get_klass (factory);
  GstCaps *scaps = gst_pad_query_caps (pad, NULL);
  GstStructure *str = gst_caps_get_structure (scaps, 0);
  const gchar *type = gst_structure_get_name (str);

  (void) klass;                 /* fetched for debugging, currently unused */
  printf (" Pad cap: %s\n", type);
  if (g_strrstr (type, "video")) {
    gst_structure_get_int (str, "width", width);
    printf (" Width: %d\n", *width);
  }
  gst_caps_unref (scaps);       /* BUGFIX: query_caps result was leaked */
}
/*
 * Probe a media file's video width: filesrc ! decodebin ! fakesink, with
 * the width captured in the autoplug-select callback.
 */
int main (gint argc,
          gchar *argv[])
{
  GstElement *pipeline, *filesrc, *decoder, *fakesink;
  GstBus *bus;
  int width = 0;

  /* init GStreamer */
  gst_init (&argc, &argv);

  /* check args */
  if (argc != 2) {
    g_print ("Usage: %s <filename>\n", argv[0]);
    return -1;
  }

  /* create a new pipeline to hold the elements */
  pipeline = gst_pipeline_new ("pipeline");

  /* Bus call back */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, bus_callback, NULL);
  gst_object_unref (bus);

  /* create file source, decodebin and sink */
  filesrc = gst_element_factory_make ("filesrc", "source");
  g_object_set (G_OBJECT (filesrc), "location", argv[1], NULL);
  decoder = gst_element_factory_make ("decodebin", NULL);
  fakesink = gst_element_factory_make ("fakesink", "sink");

  /* BUGFIX: the two user-data pointers were swapped - on_pad_added expects
   * the downstream element (fakesink) and auto_plug_select expects &width. */
  g_signal_connect (decoder, "pad-added", G_CALLBACK (on_pad_added), fakesink);
  g_signal_connect (decoder, "autoplug-select", G_CALLBACK (auto_plug_select), &width);

  /* setup */
  gst_bin_add_many (GST_BIN (pipeline), filesrc, decoder, fakesink, NULL);
  gst_element_link (filesrc, decoder);
  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);

  /* BUGFIX: wait for the state change (preroll) to complete; previously the
   * pipeline was torn down immediately, before decodebin could plug
   * anything, so the callbacks never fired and width stayed 0. */
  gst_element_get_state (GST_ELEMENT (pipeline), NULL, NULL, GST_CLOCK_TIME_NONE);

  printf (" Width: %d\n", width);
  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}
You do not leave the pipeline any time for running. You probably stop it before data can trigger the decodebin's callbacks.
For being cheap try:
gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);
g_usleep(100000000);
printf(" Width: %d\n", width);
gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL);
But more correct would be to use a real GMainLoop and act on certain event to stop the pipeline again.
EDIT: P.S. Why not GstDiscoverer? https://gstreamer.freedesktop.org/documentation/pbutils/gstdiscoverer.html?gi-language=c
I'm trying to implement the following pipeline in 'C':
arif#dev:~/GS_samples/cmd_GS$gst-launch-0.10 filesrc location="../sample_media/M1F1-Alaw-AFsp.wav" ! wavparse ! alawdec ! autoaudiosink
Here is the implementation which i have written
#include <gst/gst.h>
void on_pad_added(GstElement *src_element, GstPad *src_pad, gpointer data);
static gboolean bus_cb(GstBus *bus, GstMessage *message, gpointer data);
static GMainLoop *loop;
int main(int argc, char **argv) {
GstElement *pipeline;
GstElement *src;
GstElement *dec;
GstElement *parse;
GstElement *sink;
GstBus *bus;
gst_init(&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
pipeline = gst_pipeline_new("wav_player");
src = gst_element_factory_make("filesrc","src");
sink = gst_element_factory_make("autoaudiosink","sink");
parse = gst_element_factory_make("wavparse","parse");
dec = gst_element_factory_make("alawdec", "dec");
gst_bin_add_many (GST_BIN(pipeline), src,parse,dec,sink, NULL);
g_object_set( G_OBJECT (src) , "location",argv[1], NULL);
gst_element_link(src,parse);
gst_element_link(dec,sink);
g_signal_connect (dec, "pad-added", G_CALLBACK (on_pad_added), dec);
bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
gst_bus_add_watch (bus, bus_cb, NULL);
gst_object_unref(bus);
gst_element_set_state(pipeline, GST_STATE_PLAYING);
g_main_loop_run(loop);
return 0;
}
/* Dynamic-pad handler: connect the newly created source pad to the static
 * "sink" pad of the element supplied through the user-data pointer. */
void on_pad_added (GstElement *src_element, GstPad *src_pad, gpointer data)
{
    GstElement *sink_element = (GstElement *) data;
    GstPad *sink_pad;

    g_print ("linking dynamic pad ...\n");
    sink_pad = gst_element_get_static_pad (sink_element, "sink");
    gst_pad_link (src_pad, sink_pad);
    gst_object_unref (sink_pad);
}
/* Bus watch: trace every message by name, quit the main loop on ERROR
 * (after printing it) or on EOS. */
static gboolean bus_cb (GstBus *bus, GstMessage *message, gpointer data)
{
    GstMessageType msg_type = GST_MESSAGE_TYPE (message);

    g_print ("Got %s message\n", GST_MESSAGE_TYPE_NAME (message));

    if (msg_type == GST_MESSAGE_ERROR) {
        GError *err = NULL;
        gchar *debug = NULL;

        gst_message_parse_error (message, &err, &debug);
        g_print ("Error: %s\n", err->message);
        g_error_free (err);
        g_free (debug);
        g_main_loop_quit (loop);
    } else if (msg_type == GST_MESSAGE_EOS) {
        /* end-of-stream */
        g_main_loop_quit (loop);
    }

    /* we want to be notified again the next time there is a message
     * on the bus, so returning TRUE (FALSE means we want to stop watching
     * for messages on the bus and our callback should not be called again)
     */
    return TRUE;
}
But this does not work :
arif#dev:~/GS_samples/cmd_GS$./a.out ../sample_media/M1F1-Alaw-AFsp.wav
Got state-changed message
Got state-changed message
Got stream-status message
Got tag message
Got error message
Error: Internal data flow error.
The problem is in this line :
gst_element_link(src,parse);
gst_element_link(dec,sink);
You are trying to build one pipeline that does one task for you , but you are not linking them properly. You should use : gst_element_link_many(src,parse,dec,sink)
Pay attention that the order of these elements are important and the output of one, is the input of the other.
EDIT: You also have two other problems which i just modified and it is working:
why are you using a decoder? you are already parsing your .wav file drop the decoder element and have the data flow to audiosink to be played.
nowhere in your code are you setting your pipeline to playing State. add this block of code to set your pipe to playing state:
GstStateChangeReturn ret;
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (pipeline);
return -1;
}
I am having issues converting my working GStreamer pipeline from a command line version to C code. From the command line the following command will successfully play my headerless mu-law audio file:
gst-launch filesrc location=test.ulaw ! audio/x-mulaw, rate=8000, channels=1 ! mulawdec ! audioconvert ! audioresample ! autoaudiosink
However, my issues are arising when trying to add in the "audio/x-mulaw, rate=8000, channels=1" bit into my C program. The program started off playing wav files (using wavparse in place of mulawdec) so that I know my base C code works, and it must just be that I am misinterpreting how the caps bit needs to be added in to make it work with mu-law files.
I am creating the caps, then using the gst_element_link_filtered to use this:
GstCaps *gstMuLawCaps = gst_caps_from_string("audio/x-mulaw, rate=8000, channels=1");
gst_element_link_filtered(gstFileSource, gstMuLawDecoder, gstMuLawCaps);
But this is not working, and running the program produces the following output:
>gst-mulaw.exe test.ulaw
Playing.
Error: Internal data flow error.
Playback Finished.
I would be grateful if anyone is able to help shed some light on what I am doing wrong. The full code is given below:
#include <gst/gst.h>
#include <glib.h>
// Bus watch: stop the main loop on end-of-stream or on a pipeline error
// (printing the error first). Returning TRUE keeps the watch installed.
static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data) {
    GMainLoop *loop = (GMainLoop *) data;
    GstMessageType msg_type = GST_MESSAGE_TYPE (msg);

    if (msg_type == GST_MESSAGE_EOS) {
        g_print ("End of stream\n");
        g_main_loop_quit (loop);
    } else if (msg_type == GST_MESSAGE_ERROR) {
        GError *error = NULL;
        gchar *debug = NULL;

        gst_message_parse_error (msg, &error, &debug);
        g_free (debug);
        g_printerr ("Error: %s\n", error->message);
        g_error_free (error);
        g_main_loop_quit (loop);
    }
    return TRUE;
}
// Dynamic pad handler: link the new source pad to the "sink" pad of the
// element handed over as user data.
static void on_pad_added (GstElement *gstSourceElement, GstPad *gstSourcePad, gpointer data) {
    GstElement *gstSinkElement = (GstElement *) data;
    GstPad *gstSinkPad;

    g_print("Linking dynamic pad.\n");
    gstSinkPad = gst_element_get_static_pad (gstSinkElement, "sink");
    gst_pad_link (gstSourcePad, gstSinkPad);
    gst_object_unref (gstSinkPad);
}
int main (int argc, char *argv[]) {
GMainLoop *loop;
GstElement *gstPipeline, *gstFileSource, *gstMuLawDecoder, *gstAudioConvert, *gstAudioResample, *gstAudioSink;
GstBus *bus;
// GStreamer initialisation.
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
// Check input arguments.
if (argc != 2) {
g_printerr ("Usage: %s <mu-law File>\n", argv[0]);
return -1;
}
// Create the GStreamer elements.
gstPipeline = gst_pipeline_new ("player");
gstFileSource = gst_element_factory_make ("filesrc", "filesource");
gstMuLawDecoder = gst_element_factory_make ("mulawdec", "mulawdecoder");
gstAudioConvert = gst_element_factory_make ("audioconvert", "audioconverter");
gstAudioResample = gst_element_factory_make ("audioresample", "audioresampler");
gstAudioSink = gst_element_factory_make ("autoaudiosink", "audiosink");
if (!gstPipeline || !gstFileSource || !gstMuLawDecoder || !gstAudioConvert || !gstAudioResample || !gstAudioSink) {
g_printerr ("An element could not be created. Exiting.\n");
return -1;
}
// Linke the filesrc object to that passed on the command line.
g_object_set (G_OBJECT (gstFileSource), "location", argv[1], NULL);
// Setup the GStreamer bus.
bus = gst_pipeline_get_bus (GST_PIPELINE (gstPipeline));
gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
// Add the objects to the pipeline.
gst_bin_add_many (GST_BIN (gstPipeline), gstFileSource, gstMuLawDecoder, gstAudioConvert, gstAudioResample, gstAudioSink, NULL);
// Link the elements together.
GstCaps *gstMuLawCaps = gst_caps_from_string("audio/x-mulaw, rate=8000, channels=1");
gst_element_link_filtered(gstFileSource, gstMuLawDecoder, gstMuLawCaps);
gst_caps_unref(gstMuLawCaps);
gst_element_link_many (gstAudioConvert, gstAudioResample, gstAudioSink, NULL);
g_signal_connect (gstMuLawDecoder, "pad-added", G_CALLBACK (on_pad_added), gstAudioConvert);
// Set the pipeline to state playing, and run the main loop.
g_print ("Playing.\n");
gst_element_set_state (gstPipeline, GST_STATE_PLAYING);
g_main_loop_run (loop);
// Finished playback, cleanup.
g_print ("Playback Finished.\n");
gst_element_set_state (gstPipeline, GST_STATE_NULL);
gst_object_unref (GST_OBJECT (gstPipeline));
return 0;
}
Thanks.
Try changing
// Link the elements together.
GstCaps *gstMuLawCaps = gst_caps_from_string("audio/x-mulaw, rate=8000, channels=1");
gst_element_link_filtered(gstFileSource, gstMuLawDecoder, gstMuLawCaps);
gst_caps_unref(gstMuLawCaps);
gst_element_link_many (gstAudioConvert, gstAudioResample, gstAudioSink, NULL);
g_signal_connect (gstMuLawDecoder, "pad-added", G_CALLBACK (on_pad_added), gstAudioConvert);
to
// Link the elements together.
GstCaps *gstMuLawCaps = gst_caps_from_string("audio/x-mulaw, rate=8000, channels=1");
gst_element_link_filtered(gstFileSource, gstMuLawDecoder, gstMuLawCaps);
gst_caps_unref(gstMuLawCaps);
gst_element_link_many (gstMuLawDecoder, gstAudioConvert, gstAudioResample, gstAudioSink, NULL);
pad-added handling is needed for elements with sometimes pads (e.g. demuxers). You can remove the on_pad_added callback function (it was not called anyway, right?) The pad-type can be see in "gst-inspect mulawdec" output.
My Scenario is as follows :-
I have set up a RTSP server at IP 192.168.1.24 at port 554.I use the following gst-launch command on client side to receive packets and everything works fine.
gst-launch rtspsrc location = rtsp://admin:admin123@192.168.1.24:554/axis-media/media.amp ! fakesink
But when I implement the same thing via C code it gives me error.My C code is as follows:-
#include <gst.h>
#include <glib.h>
static gboolean bus-call (GstBus *bus, GstMessage *msg, gpointer data)
{
GMainLoop *loop = (GMainLoop *) data;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_EOS:
g_print ("End of stream\n");
g_main_loop_quit (loop);
break;
case GST_MESSAGE_ERROR: {
gchar *debug;
GError *error;
gst_message_parse_error (msg, &error, &debug);
g_free (debug);
g_printerr ("Error: %s\n", error->message);
g_error_free (error);
g_main_loop_quit (loop);
break;
}
default:
break;
}
return true;
}
int main (int argc, char *argv[])
{
GMainLoop *loop;
GstElement *pipeline, *source, *sink;
GstBus *bus;
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
if (argc != 2) {
return -1;
}
pipeline = gst_pipeline_new ("network-player");
source = gst_element_factory_make ("rtspsrc","file-source");
sink = gst_element_factory_make ("fakesink","fake");
if (!pipeline || !source || !sink) {
g_printerr ("One element could not be created. Exiting.\n");
return -1;
}
g_object_set (G_OBJECT (source), "location", argv[1], NULL);
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
gst_bin_add_many (GST_BIN (pipeline),source, sink, NULL);
gst_element_link_many (source, sink, NULL);
/* Set the pipeline to "playing" state*/
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* Iterate */
g_print ("Running...\n");
g_main_loop_run (loop);
/* Out of the main loop, clean up nicely */
g_print ("Returned, stopping playback\n");
gst_element_set_state (pipeline, GST_STATE_NULL);
g_print ("Deleting pipeline\n");
gst_object_unref (GST_OBJECT (pipeline));
return 0;
}
I am able to compile the code without any error.
But when I run the binary generated with the following format:-
user#user:~ ./helloworld rtsp://admin:admin123#192.168.1.24:554/axis-media/media.amp
I get the following error:-
Now playing: rtsp://root:nlss123#192.168.1.24:554/axis-media/media.amp
Running...
**Error: Internal data flow error**.
Returned, stopping playback
Deleting pipeline
Can anyone suggest we there is Internal Data flow error ?
i also had the same problem.
You should link source to to sink with "pad-added" signal.
In brief:
/* Bundle of all pipeline elements for the RTSP client, passed to the
 * pad-added handler. */
typedef struct myDataTag {
  GstElement *pipeline;
  GstElement *rtspsrc;
  GstElement *depayloader;
  GstElement *decoder;
  GstElement *sink;       /* BUGFIX: the GstElement type was missing here */
} myData_t;
myData_t appData;
appData->pipeline = gst_pipeline_new ("videoclient");
appData->rtspsrc = gst_element_factory_make ("rtspsrc", "rtspsrc");
g_object_set (G_OBJECT (appData->rtspsrc), "location", "rtsp://192.168.1.10:554/myStreamPath", NULL);
appData->depayloader = gst_element_factory_make ("rtph264depay","depayloader");
appData->decoder = gst_element_factory_make ("h264dec", "decoder");
appData->sink = gst_element_factory_make ("autovideosink", "sink");
//then add all elements together
gst_bin_add_many (GST_BIN (appData->pipeline), appData->rtspsrc, appData->depayloader, appData->decoder, appData->sink, NULL);
//link everythink after source
gst_element_link_many (appData->depayloader, appData->decoder, appData->sink, NULL);
/*
* Connect to the pad-added signal for the rtpbin. This allows us to link
* the dynamic RTP source pad to the depayloader when it is created.
*/
g_signal_connect (appData->rtspsrc, "pad-added", G_CALLBACK (pad_added_handler), &appData);
/* Set the pipeline to "playing" state*/
gst_element_set_state (appData->pipeline, GST_STATE_PLAYING);
/*
 * rtspsrc "pad-added" handler: link the dynamic recv_rtp_src_* pad to the
 * depayloader's static sink pad. Pads with other names are ignored, as is
 * the case where the depayloader is already linked.
 * NOTE(review): gst_pad_get_caps() is GStreamer 0.10 API (1.0 renamed it to
 * gst_pad_get_current_caps/gst_pad_query_caps) - confirm which version this
 * code targets.
 */
static void pad_added_handler (GstElement *src, GstPad *new_pad, myData_t *pThis) {
GstPad *sink_pad = gst_element_get_static_pad (pThis->depayloader, "sink");
GstPadLinkReturn ret;
GstCaps *new_pad_caps = NULL;
GstStructure *new_pad_struct = NULL;
const gchar *new_pad_type = NULL;
g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad), GST_ELEMENT_NAME (src));
/* Check the new pad's name */
if (!g_str_has_prefix (GST_PAD_NAME (new_pad), "recv_rtp_src_")) {
g_print (" It is not the right pad. Need recv_rtp_src_. Ignoring.\n");
goto exit;
}
/* If our converter is already linked, we have nothing to do here */
if (gst_pad_is_linked (sink_pad)) {
g_print (" Sink pad from %s already linked. Ignoring.\n", GST_ELEMENT_NAME (src));
goto exit;
}
/* Check the new pad's type */
new_pad_caps = gst_pad_get_caps (new_pad);
new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
new_pad_type = gst_structure_get_name (new_pad_struct);
/* Attempt the link */
ret = gst_pad_link (new_pad, sink_pad);
if (GST_PAD_LINK_FAILED (ret)) {
g_print (" Type is '%s' but link failed.\n", new_pad_type);
} else {
g_print (" Link succeeded (type '%s').\n", new_pad_type);
}
exit:
/* Unreference the new pad's caps, if we got them */
if (new_pad_caps != NULL)
gst_caps_unref (new_pad_caps);
/* Unreference the sink pad */
gst_object_unref (sink_pad);
}
Hope that this will help someone..:)
you can get verbose error logs by running the apps by --gst-debug=*rtsp*:5 e.g.
./yourApplication --gst-debug=*rtsp*:5