g_signal_connect "pad-added" doesn't work - c

I am learning how to use dynamic pads in GStreamer, so I connected to the pad-added signal to get a message once a new pad is created. However, I never get any message.
Here is the code:
#include <gst/gst.h>

static void
cb_new_pad (GstElement *element,
            GstPad *pad,
            gpointer data)
{
  gchar *name;

  name = gst_pad_get_name (pad);
  g_print ("A new pad %s was created\n", name);
  g_free (name);
  /* here, you would setup a new pad link for the newly created pad */
}

int
main (int argc, char *argv[])
{
  GstElement *pipeline, *source, *demux;
  GMainLoop *loop;

  /* init */
  gst_init (&argc, &argv);

  /* create elements */
  pipeline = gst_pipeline_new ("my_pipeline");
  source = gst_element_factory_make ("filesrc", "source");
  g_object_set (source, "location", argv[1], NULL);
  demux = gst_element_factory_make ("oggdemux", "demuxer");

  /* put together a pipeline */
  gst_bin_add_many (GST_BIN (pipeline), source, demux, NULL);
  gst_element_link_pads (source, "src", demux, "sink");

  /* listen for newly created pads */
  g_signal_connect (demux, "pad-added", G_CALLBACK (cb_new_pad), NULL);

  /* start the pipeline */
  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);
  loop = g_main_loop_new (NULL, FALSE);
  g_main_loop_run (loop);
}
So what is the problem? (By the way, I am using GStreamer 1.2.1.)

Your code worked fine for me.
Your demuxer probably could not demultiplex the stream; check the input file you are providing. It is probably not a valid Ogg file.
On a related note, do add debugging code to your program, i.e. listen to the bus for messages. It helps a lot when something doesn't work; a minimal bus watch is sketched below.
Basic tutorial 3 of the GStreamer SDK is a perfect example of what you're trying to do.
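For example, a bus watch along these lines (a sketch reusing the pipeline and loop from the question; bus_cb is a made-up name) reports errors and end-of-stream:

static gboolean
bus_cb (GstBus *bus, GstMessage *msg, gpointer data)
{
  GMainLoop *loop = (GMainLoop *) data;

  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_ERROR: {
      GError *err = NULL;
      gchar *dbg = NULL;

      gst_message_parse_error (msg, &err, &dbg);
      g_printerr ("Error: %s (%s)\n", err->message, dbg ? dbg : "no details");
      g_error_free (err);
      g_free (dbg);
      g_main_loop_quit (loop);
      break;
    }
    case GST_MESSAGE_EOS:
      g_print ("End of stream\n");
      g_main_loop_quit (loop);
      break;
    default:
      break;
  }
  return TRUE;
}

/* in main(), after creating the loop: */
GstBus *bus = gst_element_get_bus (pipeline);
gst_bus_add_watch (bus, bus_cb, loop);
gst_object_unref (bus);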

Related

Gstreamer appsink to rtsp server with appsrc as source large latency

I have a pipeline with an appsink that pushes samples to an appsrc, which acts as the source of a pipeline created by an RTSP server. It works: I can connect to the RTSP server and see the streamed video. The problem is latency. For some reason a lot of buffers are queued in the appsrc, and the viewed stream has a latency of more than two seconds.
I tried to find the source of the latency, and it looks like data only starts being read from the appsrc source pad some time after the pipeline is started. The delay between the point the pipeline is started and the point data starts to be read out of the appsrc source pad then turns into its latency.
I found this by reading out how many bytes are queued in the appsrc each time I push a buffer to it. This value rises continuously for some time; once the read-out of data starts, the amount of bytes stored in the appsrc queue stays approximately the same for the rest of the time I stream the video.
Here is the test application I'm using to verify this design.
#include <stdio.h>
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <gst/app/gstappsink.h>
#include <time.h>
#include <gst/rtsp-server/rtsp-server.h>

GMainLoop *loop;
GstElement *appsink;
GstElement *appsrc;
GstElement *appsink_pipeline;

/* Functions below print the Capabilities in a human-friendly format */
static gboolean print_field (GQuark field, const GValue * value, gpointer pfx) {
  gchar *str = gst_value_serialize (value);

  g_print ("%s %15s: %s\n", (gchar *) pfx, g_quark_to_string (field), str);
  g_free (str);
  return TRUE;
}

static void print_caps (const GstCaps * caps, const gchar * pfx) {
  guint i;

  g_return_if_fail (caps != NULL);

  if (gst_caps_is_any (caps)) {
    g_print ("%sANY\n", pfx);
    return;
  }
  if (gst_caps_is_empty (caps)) {
    g_print ("%sEMPTY\n", pfx);
    return;
  }

  for (i = 0; i < gst_caps_get_size (caps); i++) {
    GstStructure *structure = gst_caps_get_structure (caps, i);

    g_print ("%s%s\n", pfx, gst_structure_get_name (structure));
    gst_structure_foreach (structure, print_field, (gpointer) pfx);
  }
}

/* called when the appsink notifies us that there is a new buffer ready for
 * processing */
static GstFlowReturn
on_new_sample_from_sink (GstElement * elt, void * data)
{
  GstSample *sample;
  GstFlowReturn ret = GST_FLOW_OK;
  guint64 bytes;

  /* get the sample from appsink */
  sample = gst_app_sink_pull_sample (GST_APP_SINK (elt));

  if (appsrc)
  {
    bytes = gst_app_src_get_current_level_bytes (GST_APP_SRC (appsrc));
    g_print ("buffered bytes before push %lu\n", bytes);
    ret = gst_app_src_push_sample (GST_APP_SRC (appsrc), sample);
    // bytes = gst_app_src_get_current_level_bytes (GST_APP_SRC (appsrc));
    // if (ret == GST_FLOW_OK)
    //   g_print ("pushed ok - buffered bytes after push %lu\n", bytes);
  }

  gst_sample_unref (sample);
  return ret;
}

/* called when we get a GstMessage from the source pipeline; when we get EOS,
 * we notify the appsrc of it. */
static gboolean
on_source_message (GstBus * bus, GstMessage * message, void * data)
{
  gint percent;

  g_print ("%s\n", __func__);

  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_EOS:
      g_print ("The source got dry\n");
      gst_app_src_end_of_stream (GST_APP_SRC (appsrc));
      break;
    case GST_MESSAGE_ERROR:
      g_print ("Received error\n");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_BUFFERING:
      gst_message_parse_buffering (message, &percent);
      g_print ("Buffering = %d\n", percent);
      break;
    default:
      break;
  }
  return TRUE;
}

static GstFlowReturn need_data (GstElement * appsrc_loc,
                                guint length,
                                gpointer udata)
{
  g_print ("Need data\n");
  return GST_FLOW_OK;
}

/* this timeout is periodically run to clean up the expired sessions from the
 * pool. This needs to be run explicitly currently but might be done
 * automatically as part of the mainloop. */
static gboolean
timeout (GstRTSPServer * server)
{
  GstRTSPSessionPool *pool;

  pool = gst_rtsp_server_get_session_pool (server);
  gst_rtsp_session_pool_cleanup (pool);
  g_object_unref (pool);
  return TRUE;
}

void clientConnected (GstRTSPServer * server, GstRTSPClient * client, gpointer user)
{
  g_print ("%s\n", __func__);
}

static void media_state_cb (GstRTSPMedia * media, GstState state)
{
  g_print ("media state = %d\n", state);
}

static void
media_construct (GstRTSPMediaFactory * factory, GstRTSPMedia * media,
    gpointer user_data)
{
  GstElement *element;

  g_print ("%s\n", __func__);

  /* get the element used for providing the streams of the media */
  element = gst_rtsp_media_get_element (media);

  /* get our appsrc, we named it 'appsrc' with the name property */
  appsrc = gst_bin_get_by_name_recurse_up (GST_BIN (element), "appsrc");

  g_signal_connect (appsrc, "need-data",
      G_CALLBACK (need_data), NULL);
  g_signal_connect (media, "new-state",
      G_CALLBACK (media_state_cb), NULL);

  gst_object_unref (element);
}

static void
media_configure (GstRTSPMediaFactory * factory, GstRTSPMedia * media,
    gpointer user_data)
{
  GstPad *pad;
  GstCaps *caps;
  gchar *caps_str;
  GstElement *element;

  g_print ("%s\n", __func__);

  /* get the element used for providing the streams of the media */
  element = gst_rtsp_media_get_element (media);

  /* get our appsrc, we named it 'mysrc' with the name property */
  appsrc = gst_bin_get_by_name_recurse_up (GST_BIN (element), "appsrc");

  pad = gst_element_get_static_pad (appsink, "sink");
  if (pad)
  {
    g_print ("Got pad\n");
    caps = gst_pad_get_current_caps (pad);
    if (caps)
    {
      caps_str = gst_caps_to_string (caps);
      g_print ("Got caps %s\n", caps_str);
      g_object_set (G_OBJECT (appsrc), "caps", caps, NULL);
      gst_caps_unref (caps);
    }
  }

  /* this instructs appsrc that we will be dealing with timed buffers */
  gst_util_set_object_arg (G_OBJECT (appsrc), "format", "time");

  gst_object_unref (element);
}

int main (int argc, char *argv[])
{
  GstBus *bus;
  GstRTSPServer *server;
  GstRTSPMountPoints *mounts;
  GstRTSPMediaFactory *factory;

  gchar src[] = "nvv4l2camerasrc device=/dev/video0 ! video/x-raw(memory:NVMM), width=1920, height=1080, format=UYVY, framerate=60/1 ! "
      " queue max-size-buffers=3 leaky=downstream ! "
      " nvvidconv name=conv ! video/x-raw(memory:NVMM), width=1280, height=720, format=NV12, framerate=60/1 ! "
      " nvv4l2h264enc control-rate=1 bitrate=8000000 preset-level=1 profile=0 disable-cabac=1 maxperf-enable=1 name=encoder insert-sps-pps=1 insert-vui=1 idrinterval=30 ! "
      " appsink name=appsink sync=false max-buffers=3";
  gchar sink[] = "( appsrc name=appsrc format=3 stream-type=0 is-live=true blocksize=2097152 max-bytes=200000 ! "
      " queue max-size-buffers=3 leaky=no ! "
      " rtph264pay config-interval=1 name=pay0 )";

  gst_init (&argc, &argv);

  loop = g_main_loop_new (NULL, FALSE);

  /* Create pipeline with appsink */
  g_print ("Creating pipeline with appsink\n");
  appsink_pipeline = gst_parse_launch (src, NULL);
  if (appsink_pipeline == NULL) {
    g_print ("Bad source\n");
    g_main_loop_unref (loop);
    return -1;
  }

  /* to be notified of messages from this pipeline, mostly EOS */
  bus = gst_element_get_bus (appsink_pipeline);
  gst_bus_add_watch (bus, (GstBusFunc) on_source_message, appsink_pipeline);
  gst_object_unref (bus);

  /* Create push_buffer callback for appsink */
  g_print ("Creating push buffer callback\n");
  appsink = gst_bin_get_by_name (GST_BIN (appsink_pipeline), "appsink");
  g_object_set (G_OBJECT (appsink), "emit-signals", TRUE, "sync", FALSE, NULL);
  g_signal_connect (appsink, "new-sample",
      G_CALLBACK (on_new_sample_from_sink), NULL);

  /* Create rtsp server with pipeline starting with appsrc */
  g_print ("Creating rtsp server\n");

  /* create a server instance */
  server = gst_rtsp_server_new ();

  /* get the mount points for this server, every server has a default object
   * that can be used to map uri mount points to media factories */
  mounts = gst_rtsp_server_get_mount_points (server);

  /* make a media factory for a test stream. The default media factory can use
   * gst-launch syntax to create pipelines.
   * any launch line works as long as it contains elements named pay%d. Each
   * element with pay%d names will be a stream */
  factory = gst_rtsp_media_factory_new ();
  gst_rtsp_media_factory_set_launch (factory, sink);
  gst_rtsp_media_factory_set_shared (factory, TRUE);

  /* attach the test factory to the /test url */
  gst_rtsp_mount_points_add_factory (mounts, "/test", factory);

  /* don't need the ref to the mapper anymore */
  g_object_unref (mounts);

  /* attach the server to the default maincontext */
  if (gst_rtsp_server_attach (server, NULL) == 0)
    goto failed;

  /* add a timeout for the session cleanup */
  g_timeout_add_seconds (2, (GSourceFunc) timeout, server);

  g_signal_connect (server, "client-connected",
      G_CALLBACK (clientConnected), NULL);

  /* Create media-constructed callback to get appsrc reference */
  g_print ("Creating media-constructed callback\n");
  g_signal_connect (factory, "media-constructed", (GCallback) media_construct,
      NULL);
  g_signal_connect (factory, "media-configure", (GCallback) media_configure,
      NULL);

  /* Push buffers from appsink to appsrc */
  /* start serving, this never stops */
  g_print ("Running main loop\n");
  gst_element_set_state (appsink_pipeline, GST_STATE_PLAYING);
  g_main_loop_run (loop);
  gst_element_set_state (appsink_pipeline, GST_STATE_NULL);
  return 0;

  /* ERRORS */
failed:
  {
    g_print ("failed to attach the server\n");
    return -1;
  }
}
I would appreciate any ideas about what can cause this behavior and how to solve it.
Thanks a lot!
This latency problem can have many causes, but most of the time it is because frames are not in sync and a lot of data accumulates in the queue.
To narrow down the real problem, test the following cases:
Check the behavior with videotestsrc instead of the camera source (a sketch of this variant follows the list).
Are you sure the queue after nvv4l2camerasrc is needed? What is the output if you skip the queue element?
Also check with a lower-resolution input to learn something from it.
What happens if you use v4l2src instead of nvv4l2camerasrc, assuming your camera source is V4L2 compliant?
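For the first test, a drop-in replacement for the capture description could look like this sketch (videotestsrc and x264enc stand in for the camera and the NVIDIA encoder so it runs anywhere; the caps and encoder settings here are assumptions):

/* Hypothetical test variant of the capture pipeline: software source and
 * encoder, same appsink configuration as the original. */
gchar src[] = "videotestsrc is-live=true ! "
    " video/x-raw, width=1280, height=720, format=I420, framerate=30/1 ! "
    " x264enc tune=zerolatency bitrate=8000 key-int-max=30 ! h264parse ! "
    " appsink name=appsink sync=false max-buffers=3";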
Thanks

Gstreamer callbacks are not hit after setting pipeline to playing state

I am writing a media application to grab video frames from a video file. I want to get the video properties before pulling samples from the pipeline, so I added a callback for the autoplug-select signal on the decoder and try to read the properties there. These callbacks are not called even after I set the pipeline to the PLAYING state, but they are called if I pull a sample from the pipeline using gst_app_sink_pull_sample.
Am I missing anything here? My understanding is that these callbacks will be invoked when the pipeline goes into the PLAYING state.
#include <gst/gst.h>
#include <stdio.h>

static void bus_callback (GstBus *bus, GstMessage *msg, gpointer data)
{
  switch (GST_MESSAGE_TYPE (msg))
  {
    case GST_MESSAGE_ERROR: {
      GError *err;
      gchar *debug;

      gst_message_parse_error (msg, &err, &debug);
      g_print ("Error: %s\n", err->message);
      g_error_free (err);
      g_free (debug);
      break;
    }
    default:
      /* Unhandled message */
      break;
  }
}

static void
on_pad_added (GstElement *element, GstPad *pad, gpointer data)
{
  GstPad *sinkpad;
  GstElement *decoder = (GstElement *) data;

  /* We can now link this pad with the decoder sink pad */
  sinkpad = gst_element_get_static_pad (decoder, "sink");
  gst_pad_link (pad, sinkpad);
  gst_object_unref (sinkpad);
}

static void
auto_plug_select (GstElement *decoder, GstPad *pad, GstCaps *caps,
    GstElementFactory *factory, int *width)
{
  const gchar *klass = gst_element_factory_get_klass (factory);
  /* MW_customData *cdata = (MW_customData*) data; */
  GstCaps *scaps = gst_pad_query_caps (pad, NULL);
  GstStructure *str = gst_caps_get_structure (scaps, 0);
  const gchar *type = gst_structure_get_name (str);

  printf (" Pad cap: %s\n", type);

  if (g_strrstr (type, "video"))
  {
    gst_structure_get_int (str, "width", width);
    printf (" Width: %d\n", *width);
  }
}

int main (gint argc, gchar *argv[])
{
  GstElement *pipeline, *filesrc, *decoder, *fakesink;
  GstBus *bus;

  /* init GStreamer */
  gst_init (&argc, &argv);

  /* check args */
  if (argc != 2) {
    g_print ("Usage: %s <filename>\n", argv[0]);
    return -1;
  }

  /* create a new pipeline to hold the elements */
  pipeline = gst_pipeline_new ("pipeline");

  /* Bus callback */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, bus_callback, NULL);
  gst_object_unref (bus);

  /* create file source and typefind element */
  filesrc = gst_element_factory_make ("filesrc", "source");
  g_object_set (G_OBJECT (filesrc), "location", argv[1], NULL);
  decoder = gst_element_factory_make ("decodebin", NULL);
  fakesink = gst_element_factory_make ("fakesink", "sink");

  int width = 0;

  /* Connect the sink pad when decoder completes the operation */
  g_signal_connect (decoder, "pad-added", G_CALLBACK (on_pad_added), &width);
  g_signal_connect (decoder, "autoplug-select", G_CALLBACK (auto_plug_select), fakesink);

  /* setup */
  gst_bin_add_many (GST_BIN (pipeline), filesrc, decoder, fakesink, NULL);
  gst_element_link (filesrc, decoder);
  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);

  printf (" Width: %d\n", width);

  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL);
  return 0;
}
You do not leave the pipeline any time to run. You probably stop it before any data can trigger decodebin's callbacks.
As a cheap test, try:
gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);
g_usleep (100000000);
printf (" Width: %d\n", width);
gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL);
More correct would be to use a real GMainLoop and act on a certain event (e.g. EOS) to stop the pipeline again, as sketched below.
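A minimal sketch of that approach, assuming the pipeline and width variables from the question (quit_on_eos_or_error is a made-up helper name):

static gboolean
quit_on_eos_or_error (GstBus *bus, GstMessage *msg, gpointer data)
{
  GMainLoop *loop = (GMainLoop *) data;

  /* stop the main loop once the stream finishes or fails */
  if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_EOS ||
      GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ERROR)
    g_main_loop_quit (loop);
  return TRUE;
}

/* in main(), instead of the sleep: */
GMainLoop *loop = g_main_loop_new (NULL, FALSE);
GstBus *watch_bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
gst_bus_add_watch (watch_bus, quit_on_eos_or_error, loop);
gst_object_unref (watch_bus);

gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);
g_main_loop_run (loop);   /* the decodebin callbacks fire while this runs */
printf (" Width: %d\n", width);
gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL);
g_main_loop_unref (loop);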
EDIT: P.S. Why not GstDiscoverer? https://gstreamer.freedesktop.org/documentation/pbutils/gstdiscoverer.html?gi-language=c
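That route can be as small as this sketch (assumes a proper URI such as file:///path/to/video and linking against gstreamer-pbutils-1.0; print_video_width is a made-up name):

#include <gst/pbutils/pbutils.h>

/* Sketch: synchronously discover a URI and print the first video stream's width. */
static void
print_video_width (const gchar *uri)
{
  GError *err = NULL;
  GstDiscoverer *disco;
  GstDiscovererInfo *info;
  GList *streams;

  disco = gst_discoverer_new (5 * GST_SECOND, &err);
  if (disco == NULL) {
    g_clear_error (&err);
    return;
  }

  /* blocks until the file has been inspected or the timeout hits */
  info = gst_discoverer_discover_uri (disco, uri, &err);
  if (info != NULL) {
    streams = gst_discoverer_info_get_video_streams (info);
    if (streams != NULL) {
      GstDiscovererVideoInfo *vinfo = streams->data;
      g_print (" Width: %u\n", gst_discoverer_video_info_get_width (vinfo));
      gst_discoverer_stream_info_list_free (streams);
    }
    g_object_unref (info);
  }
  g_clear_error (&err);
  g_object_unref (disco);
}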

Linking GStreamer playbin to custom video sink: videoconvert and sink not linking

I am trying to build a GStreamer application from this pipeline: gst-launch-1.0 playbin uri=rtsp:// video-sink="videoconvert ! video/x-raw,width=720, height=480 ! ximagesink"
I get an error when linking the elements. Here is my code:
#include <gst/gst.h>

int main(int argc, char *argv[]) {
  GstElement *source, *videosink, *pipeline, *videoconvert;
  GstCaps *capsFilter;
  GstBus *bus;
  GstMessage *msg;
  GstPad *pad;
  gboolean link_ok;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Create Elements */
  pipeline = gst_pipeline_new ("my-pipeline");
  source = gst_element_factory_make ("playbin", "source");
  videoconvert = gst_element_factory_make ("videoconvert", "convert");
  videosink = gst_element_factory_make ("ximagesink", "autovideosink");

  /* set property value */
  g_object_set (source, "uri", "rtsp:<file location>", NULL);

  if (!pipeline || !source || !videoconvert || !videosink)
  {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }

  gst_bin_add_many (GST_BIN (pipeline), videoconvert, videosink, NULL);

  capsFilter = gst_caps_new_simple ("video/x-raw",
      "width", G_TYPE_INT, 176,
      "height", G_TYPE_INT, 144,
      NULL);
  link_ok = gst_element_link_filtered (videoconvert, videosink, capsFilter);
  gst_caps_unref (capsFilter);
  if (!link_ok) {
    g_warning ("Failed to link element1 and element2!");
  }

  if (gst_element_link_many (videoconvert, videosink, NULL) != TRUE) {
    g_print ("Failed to link some elements .....1 !\n");
    gst_object_unref (pipeline);
    return -1;
  }

  /* Start playing */
  gst_element_set_state (source, GST_STATE_PLAYING);

  /* Wait until error or EOS */
  bus = gst_element_get_bus (source);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
It should be noted that the playbin element builds a complete pipeline from source to sink, so the pipeline without any sink configuration plays perfectly:
gst-launch-1.0 playbin uri=rtsp://127.0.0.1:8551/test
The pipeline you actually want to create is this:
gst-launch-1.0 playbin uri=rtsp://127.0.0.1:8551/test video-sink="videoconvert ! video/x-raw,width=320,height=240 ! ximagesink"
To embed this in a GStreamer application, it is not necessary to link all the elements yourself. Instead, build a custom video output bin and set it as playbin's video-sink property: create a bin, link the necessary elements inside it, and tell playbin to use it via its video-sink property. It is also necessary to create a GhostPad for the bin and point it to the sink pad of the first element within the bin.
This is the result:
#include <gst/gst.h>

int main(int argc, char *argv[]) {
  GstElement *source, *videosink, *pipeline, *videoconvert, *customoutput;
  GstCaps *capsFilter;
  GstBus *bus;
  GstMessage *msg;
  GstPad *pad;
  gboolean add_ok;
  gboolean link_ok;
  GstStateChangeReturn ret;
  GMainLoop *loop;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  loop = g_main_loop_new (NULL, FALSE);

  /* Create Elements */
  pipeline = gst_pipeline_new ("my-pipeline");
  source = gst_element_factory_make ("playbin", "source");
  videoconvert = gst_element_factory_make ("videoconvert", "videoconvert");
  capsFilter = gst_caps_new_simple ("video/x-raw",
      "width", G_TYPE_INT, 320,
      "height", G_TYPE_INT, 240,
      NULL);
  videosink = gst_element_factory_make ("ximagesink", "videosink");
  customoutput = gst_bin_new ("customoutput");

  // It is also possible to create the bin like this; ghost pads for unlinked
  // source or sink pads within the bin can be created automatically:
  // customoutput = gst_parse_bin_from_description ("videoconvert ! video/x-raw,width=320 ! ximagesink", TRUE, NULL);

  gst_bin_add_many (GST_BIN (customoutput), videoconvert, videosink, NULL);
  link_ok = gst_element_link_filtered (videoconvert, videosink, capsFilter);
  gst_caps_unref (capsFilter);
  if (!link_ok) {
    g_warning ("Failed to link element1 and element2!");
  }

  GstPad *sinkpad, *ghost_sinkpad;
  sinkpad = gst_element_get_static_pad (videoconvert, "sink");
  ghost_sinkpad = gst_ghost_pad_new ("sink", sinkpad);
  gst_pad_set_active (ghost_sinkpad, TRUE);
  gst_element_add_pad (customoutput, ghost_sinkpad);

  /* set property values */
  g_object_set (source, "video-sink", customoutput, NULL);
  g_object_set (source, "uri", "rtsp://127.0.0.1:8551/test", NULL);

  if (!pipeline || !source || !videoconvert || !capsFilter || !videosink || !customoutput)
  {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }

  gst_bin_add_many (GST_BIN (pipeline), source, NULL);

  /* Start playing */
  ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  g_print ("Running...\n");
  g_main_loop_run (loop);

  /* Wait until error or EOS */
  //bus = gst_element_get_bus (pipeline);
  //msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

  /* Free resources */
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  g_main_loop_unref (loop);
  return 0;
}

Gstreamer HLS pipeline creation error

I am trying to play an HLS stream using GStreamer 1.0. I have a pipeline that works from the command line:
gst-launch-1.0 -v souphttpsrc location="http://devimages.apple.com/iphone/samples/bipbop/bipbopall.m3u8" ! hlsdemux ! tsdemux ! h264parse ! avdec_h264 ! autovideosink
But when I try to convert this into C code, it fails.
int main(int argc, char* argv[])
{
  GMainLoop *loop;
  GstElement *pipeline, *source, *demuxer, *tsdemux, *h264parse, *vdecoder, *vsink;
  GstElement *aacparse, *adecoder, *aconvert, *asink;
  GstBus *bus;
  int bus_watch_id;

  gst_debug_set_default_threshold (3);
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  if (argc != 2)
  {
    g_printerr ("Usage: %s <http stream source>\n", argv[0]);
    return -1;
  }

  pipeline = gst_pipeline_new ("myApp");
  source = gst_element_factory_make ("souphttpsrc", "http-src");
  demuxer = gst_element_factory_make ("hlsdemux", "hls-demuxer");
  tsdemux = gst_element_factory_make ("tsdemux", "ts-demuxer");
  h264parse = gst_element_factory_make ("h264parse", "h264parse");
  vdecoder = gst_element_factory_make ("avdec_h264", "h264decoder");
  vsink = gst_element_factory_make ("autovideosink", "videosink");

  /* set the input url to the source element */
  g_object_set (G_OBJECT (source), "location", argv[1], NULL);

  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  /* add elements into the pipeline */ //next aacparse
  gst_bin_add_many (GST_BIN (pipeline), source, demuxer, tsdemux, h264parse, vdecoder, vsink, NULL);
  gst_element_link (source, demuxer);

  // this was wrong
  /*gst_element_link_many(tsdemux, h264parse, vdecoder, vsink, NULL);*/
  /* connect demuxer and decoder on pad added */
  /*g_signal_connect(demuxer, "pad-added", G_CALLBACK(on_pad_added), vdecoder);*/

  // Correct Implementation
  gst_element_link_many (h264parse, vdecoder, vsink, NULL);
  g_signal_connect (demuxer, "pad-added", G_CALLBACK (on_pad_added), tsdemux);
  g_signal_connect (tsdemux, "pad-added", G_CALLBACK (on_pad_added), h264parse);
  g_signal_connect (demuxer, "pad-added", G_CALLBACK (on_pad_added), vdecoder);
  // Correct Implementation

  g_print ("Starting play: %s\n", argv[1]);
  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  g_print ("Running\n");
  g_main_loop_run (loop);

  /* Clean up after execution of main loop */
  g_print ("Stopping Playback: %s\n", argv[1]);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  g_print ("Quitting\n");
  g_object_unref (G_OBJECT (pipeline));
  g_source_remove (bus_watch_id);
  g_main_loop_unref (loop);
  return 0;
}
I compile the code using:
cc my_app.c -o my_app $(pkg-config --cflags --libs gstreamer-1.0)
And launch the application using:
./my_app http://devimages.apple.com/iphone/samples/bipbop/bipbopall.m3u8
I always get the following error:
hlsdemux gsthlsdemux.c:792:gst_hls_demux_stream_loop:<hls-demuxer> error: stream stopped, reason not-negotiated
Do I have to do anything differently?
// this was wrong
gst_element_link (source, demuxer);
// Correct Implementation
gst_element_link_many (source, demuxer, NULL);
(Note: gst_element_link takes exactly two elements; the NULL-terminated variant is gst_element_link_many.)
Where is your void on_pad_added (GstElement *element, GstPad *pad, gpointer data); function? And take a look here.
I was able to get it working. The problem was that "tsdemux" and "demuxer" have to be connected at run time, in the "on_pad_added" method.
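For reference, the on_pad_added helper used above can be as simple as this sketch (the element to link to is passed as user data in the g_signal_connect calls):

static void
on_pad_added (GstElement *element, GstPad *pad, gpointer data)
{
  GstElement *target = GST_ELEMENT (data);
  GstPad *sinkpad = gst_element_get_static_pad (target, "sink");

  /* demuxers emit pad-added once per stream; only link a pad that is free */
  if (sinkpad != NULL) {
    if (!gst_pad_is_linked (sinkpad))
      gst_pad_link (pad, sinkpad);
    gst_object_unref (sinkpad);
  }
}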

GStreamer caps filtering issue when converting from command line to C code

I am having issues converting my working GStreamer pipeline from a command-line version to C code. From the command line, the following command successfully plays my headerless mu-law audio file:
gst-launch filesrc location=test.ulaw ! audio/x-mulaw, rate=8000, channels=1 ! mulawdec ! audioconvert ! audioresample ! autoaudiosink
However, my issues arise when trying to add the "audio/x-mulaw, rate=8000, channels=1" bit to my C program. The program started off playing wav files (using wavparse in place of mulawdec), so I know my base C code works; I must just be misinterpreting how the caps need to be added to make it work with mu-law files.
I am creating the caps, then using gst_element_link_filtered to apply them:
GstCaps *gstMuLawCaps = gst_caps_from_string("audio/x-mulaw, rate=8000, channels=1");
gst_element_link_filtered(gstFileSource, gstMuLawDecoder, gstMuLawCaps);
But this is not working, and running the program produces the following output:
>gst-mulaw.exe test.ulaw
Playing.
Error: Internal data flow error.
Playback Finished.
I would be grateful if anyone is able to help shed some light on what I am doing wrong. The full code is given below:
#include <gst/gst.h>
#include <glib.h>

static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data) {
  GMainLoop *loop = (GMainLoop *) data;

  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:
      g_print ("End of stream\n");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_ERROR: {
      gchar *debug;
      GError *error;

      gst_message_parse_error (msg, &error, &debug);
      g_free (debug);
      g_printerr ("Error: %s\n", error->message);
      g_error_free (error);
      g_main_loop_quit (loop);
      break;
    }
    default:
      break;
  }
  return TRUE;
}

static void on_pad_added (GstElement *gstSourceElement, GstPad *gstSourcePad, gpointer data) {
  g_print ("Linking dynamic pad.\n");

  GstPad *gstSinkPad;
  GstElement *gstSinkElement = (GstElement *) data;

  gstSinkPad = gst_element_get_static_pad (gstSinkElement, "sink");
  gst_pad_link (gstSourcePad, gstSinkPad);
  gst_object_unref (gstSinkPad);
}

int main (int argc, char *argv[]) {
  GMainLoop *loop;
  GstElement *gstPipeline, *gstFileSource, *gstMuLawDecoder, *gstAudioConvert, *gstAudioResample, *gstAudioSink;
  GstBus *bus;

  // GStreamer initialisation.
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  // Check input arguments.
  if (argc != 2) {
    g_printerr ("Usage: %s <mu-law File>\n", argv[0]);
    return -1;
  }

  // Create the GStreamer elements.
  gstPipeline = gst_pipeline_new ("player");
  gstFileSource = gst_element_factory_make ("filesrc", "filesource");
  gstMuLawDecoder = gst_element_factory_make ("mulawdec", "mulawdecoder");
  gstAudioConvert = gst_element_factory_make ("audioconvert", "audioconverter");
  gstAudioResample = gst_element_factory_make ("audioresample", "audioresampler");
  gstAudioSink = gst_element_factory_make ("autoaudiosink", "audiosink");
  if (!gstPipeline || !gstFileSource || !gstMuLawDecoder || !gstAudioConvert || !gstAudioResample || !gstAudioSink) {
    g_printerr ("An element could not be created. Exiting.\n");
    return -1;
  }

  // Point the filesrc location at the file passed on the command line.
  g_object_set (G_OBJECT (gstFileSource), "location", argv[1], NULL);

  // Set up the GStreamer bus.
  bus = gst_pipeline_get_bus (GST_PIPELINE (gstPipeline));
  gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  // Add the objects to the pipeline.
  gst_bin_add_many (GST_BIN (gstPipeline), gstFileSource, gstMuLawDecoder, gstAudioConvert, gstAudioResample, gstAudioSink, NULL);

  // Link the elements together.
  GstCaps *gstMuLawCaps = gst_caps_from_string ("audio/x-mulaw, rate=8000, channels=1");
  gst_element_link_filtered (gstFileSource, gstMuLawDecoder, gstMuLawCaps);
  gst_caps_unref (gstMuLawCaps);
  gst_element_link_many (gstAudioConvert, gstAudioResample, gstAudioSink, NULL);
  g_signal_connect (gstMuLawDecoder, "pad-added", G_CALLBACK (on_pad_added), gstAudioConvert);

  // Set the pipeline to state playing, and run the main loop.
  g_print ("Playing.\n");
  gst_element_set_state (gstPipeline, GST_STATE_PLAYING);
  g_main_loop_run (loop);

  // Finished playback, cleanup.
  g_print ("Playback Finished.\n");
  gst_element_set_state (gstPipeline, GST_STATE_NULL);
  gst_object_unref (GST_OBJECT (gstPipeline));
  return 0;
}
Thanks.
Try changing
// Link the elements together.
GstCaps *gstMuLawCaps = gst_caps_from_string("audio/x-mulaw, rate=8000, channels=1");
gst_element_link_filtered(gstFileSource, gstMuLawDecoder, gstMuLawCaps);
gst_caps_unref(gstMuLawCaps);
gst_element_link_many (gstAudioConvert, gstAudioResample, gstAudioSink, NULL);
g_signal_connect (gstMuLawDecoder, "pad-added", G_CALLBACK (on_pad_added), gstAudioConvert);
to
// Link the elements together.
GstCaps *gstMuLawCaps = gst_caps_from_string("audio/x-mulaw, rate=8000, channels=1");
gst_element_link_filtered(gstFileSource, gstMuLawDecoder, gstMuLawCaps);
gst_caps_unref(gstMuLawCaps);
gst_element_link_many (gstMuLawDecoder, gstAudioConvert, gstAudioResample, gstAudioSink, NULL);
pad-added handling is needed for elements with sometimes pads (e.g. demuxers). You can remove the on_pad_added callback function (it was not called anyway, right?). The pad types can be seen in the "gst-inspect mulawdec" output.
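Since mulawdec's sink pad is an always pad, it can be linked statically; a quick sanity check along these lines (a sketch, not part of the original answer) shows the difference:

// An "always" pad exists as soon as the element is created, so
// gst_element_get_static_pad returns it directly and no pad-added
// callback is required.
GstPad *sinkpad = gst_element_get_static_pad (gstMuLawDecoder, "sink");
if (sinkpad != NULL) {
  g_print ("mulawdec has a static sink pad - link it directly.\n");
  gst_object_unref (sinkpad);
}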
