rtspsrc "internal data stream error" in a GStreamer C application - c

gst-launch-1.0 rtspsrc location=rtsp://192.168.1.43:554/stream0 latency=0 name=src src. ! rtph264depay ! queue ! h264parse ! vpudec ! videoconvert ! videoscale ! waylandsink window-width=352 window-height=288
I'm trying to write this pipeline as a C application. The command above runs successfully, but the C application fails with: "Error received from element udpsrc1: Internal data stream error."
Here's my c code:
#include <gst/gst.h>
#include <string.h>
#include <stdlib.h>
#include <stdio.h>
/* Structure to contain all our information, so we can pass it to callbacks */
/* Holds every element of the RTSP playback pipeline so a single pointer can
 * be handed to GStreamer callbacks. Once added with gst_bin_add_many() the
 * elements are owned by the pipeline. */
typedef struct _Data {
GstElement *pipeline; /* top-level GstPipeline container */
GstElement *source; /* rtspsrc - network source (pads appear dynamically) */
GstElement *videoQueue; /* queue - decouples depayloading from parsing */
GstElement *videoParser; /* h264parse */
GstElement *videoDepayloader; /* rtph264depay - strips RTP framing */
GstElement *videoDecoder; /* vpudec - hardware H.264 decoder */
GstElement *videoSink; /* waylandsink */
GstElement *videoConvert; /* videoconvert */
GstElement *videoScale; /* videoscale */
} Data;
int main(int argc, char *argv[]) {
Data data;
GstBus *bus;
GstMessage *msg;
GstPad *pad;
gboolean terminate = FALSE;
GstStateChangeReturn ret;
/* Initialize GStreamer */
gst_init (NULL,NULL);
data.source = gst_element_factory_make ("rtspsrc", "source");
data.videoQueue = gst_element_factory_make ("queue", "videoQueue");
data.videoDepayloader = gst_element_factory_make ("rtph264depay", "videoDepayloader");
data.videoDecoder = gst_element_factory_make ("vpudec", "videoDecoder");
data.videoSink = gst_element_factory_make ("waylandsink", "videoSink");
data.videoParser = gst_element_factory_make("h264parse", "videoParser");
data.videoConvert = gst_element_factory_make("videoconvert", "video-convert");
data.videoScale = gst_element_factory_make("videoscale", "videoScale");
/* Create the empty pipeline */
data.pipeline = gst_pipeline_new ("rtsp-pipeline");
if (!data.source) {
g_printerr ("source elements could be created.\n");
return -1;
}
if(!data.videoQueue){
g_printerr ("videoQueue elements could be created.\n");
return -1;
}
if(!data.videoDepayloader){
g_printerr ("videoDepayloader elements could be created.\n");
return -1;
}
if(!data.videoDecoder){
g_printerr ("videoDecoder elements could be created.\n");
return -1;
}
if(!data.videoSink){
g_printerr ("videoSink elements could be created.\n");
return -1;
}
if(!data.videoParser){
g_printerr ("videoParser elements could be created.\n");
return -1;
}
if(!data.videoConvert){
g_printerr ("videoConvert elements could be created.\n");
return -1;
}
if(!data.videoScale){
g_printerr ("videoScale elements could be created.\n");
return -1;
}
/* Configure elements */
g_object_set(data.source, "location", "rtsp://192.168.1.43/h264cif", NULL);
g_object_set(data.source, "latency", 0, NULL);
gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.videoDepayloader, data.videoQueue, data.videoParser, data.videoDecoder, data.videoConvert, data.videoScale, data.videoSink, NULL);
//GST_DEBUG=4 gst-launch-1.0 rtspsrc location=rtsp://192.168.1.43:554/stream0 latency=0 name=src src. ! rtph264depay ! queue ! h264parse ! vpudec ! videoconvert ! videoscale ! waylandsink window-width=352 window-height=288
if (!(gst_element_link_many(data.videoDepayloader, data.videoQueue,
data.videoParser, data.videoDecoder,
data.videoScale, data.videoConvert, data.videoSink, NULL)))
{
g_printerr("Error linking fields... \n");
exit (-1);
}
/* Start playing */
ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_print ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (data.pipeline);
exit(1);
}
/* Listen to the bus */
bus = gst_element_get_bus (data.pipeline);
do {
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
terminate = TRUE;
break;
case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n");
terminate = TRUE;
break;
case GST_MESSAGE_STATE_CHANGED:
/* We are only interested in state-changed messages from the pipeline */
if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data.pipeline)) {
GstState old_state, new_state, pending_state;
gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
g_print ("Pipeline state changed from %s to %s:\n",
gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));
}
break;
default:
/* We should not reach here */
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (msg);
}
} while (!terminate);
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (data.pipeline, GST_STATE_NULL);
gst_object_unref (data.pipeline);
return 0;
}

It seems like you did not link your source element:
gst_element_link_many(data.source, data.videoDepayloader, /* ... */ ,NULL)
And be sure to link the elements in the same order as your command line pipeline.

Related

elements could not be linked, which elements to link?(gstreamer c)

I've been studying about gstreamer and I don't understand where I got it wrong but every pipeline I tried turning code would give me 'Elements could not be linked' and I'm running this code on ubuntu 20.04 which has an upstream kernel version 5.13.0-35(which I think is not a concern) with gcc 9 and here is the code:
#include <gst/gst.h>
#include <stdio.h>
/* Structure to contain all our information, so we can pass it to callbacks */
/* Holds the UDP/RTP receive pipeline elements plus the RTP caps so they can
 * be passed around as one unit. */
typedef struct _CustomData {
GstElement *pipeline; /* top-level pipeline */
GstElement *source; /* udpsrc */
GstCaps *caps; /* application/x-rtp caps applied to udpsrc */
GstElement *depay; /* rtph264depay */
GstElement *parse; /* h264parse */
GstElement *decode; /* decodebin - src pad appears dynamically */
GstElement *convert; /* videoconvert */
GstElement *sink; /* autovideosink */
} CustomData;
int main (int argc, char *argv[])
{
CustomData data;
GstBus *bus;
GstMessage *msg;
GstStateChangeReturn ret;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements */
data.source = gst_element_factory_make ("udpsrc", "source");
data.caps = gst_caps_new_simple("application/x-rtp",
"media", G_TYPE_STRING, "video",
"clock-rate", G_TYPE_INT, 90000,
"encoding-name", G_TYPE_STRING, "H264",
"payload", G_TYPE_INT, 96,
NULL);
data.depay = gst_element_factory_make ("rtph264depay", "depay");
data.parse = gst_element_factory_make ("h264parse", "parse");
data.decode = gst_element_factory_make ("decodebin", "decode");
data.convert = gst_element_factory_make ("videoconvert", "convert");
data.sink = gst_element_factory_make ("autovideosink", "sink");
/* Create the empty pipeline */
data.pipeline = gst_pipeline_new ("test-pipeline");
if (!data.pipeline || !data.source || !data.depay || !data.parse || !data.decode || !data.convert || !data.sink) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.depay, data.parse, data.decode, data.convert, data.sink, NULL);
if (gst_element_link_many (data.source, data.depay, data.parse, data.decode, data.convert, data.sink, NULL) != TRUE) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (data.pipeline);
return -1;
}
/* Set the port and caps to play */
g_object_set (data.source, "port", 5000, NULL);
g_object_set (data.source, "caps", data.caps, NULL);
g_object_set (data.sink, "sync", FALSE, NULL);
/* Start playing */
ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (data.pipeline);
return -1;
}
/* Wait until error or EOS */
bus = gst_element_get_bus (data.pipeline);
msg =
gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n",
GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n",
debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
break;
case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n");
break;
default:
/* We should not reach here because we only asked for ERRORs and EOS */
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (msg);
}
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (data.pipeline, GST_STATE_NULL);
gst_object_unref (data.pipeline);
return 0;
}
the recieving pipeline is:
gst-launch-1.0 -v udpsrc port=5000 ! "application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264, payload=(int)96" ! rtph264depay ! h264parse ! decodebin ! videoconvert ! autovideosink sync=false
And the sender is:
gst-launch-1.0 -v filesrc location=test.mp4 ! qtdemux ! h264parse ! avdec_h264 ! x264enc ! rtph264pay ! udpsink host=$HOST port=5000
Thanks in advance.
You may try using gst_parse_launch() that will negociate caps as gst-launch does:
const gchar *pipeline_str = "udpsrc port=5000 ! application/x-rtp,encoding-name=H264 ! rtph264depay ! h264parse ! decodebin ! videoconvert ! autovideosink sync=false";
GstElement *pipeline = gst_parse_launch (pipeline_str, NULL);
if (!pipeline) {
g_error ("Failed to create pipeline\n");
exit(-1);
}
...
Otherwise, you may have to add capsfilters.

Gstreamer pipeline works with gst-launch but not in code. Reproducing a mjpeg stream from a IP camera

I want to reproduce a mjpeg stream from a intercom (but it's equivalent to a IP camera). Using gst-launch in the console works fine:
gst-launch-1.0 souphttpsrc location="http://192.168.1.191/api/camera/snapshot?width=640&height=480&fps=10" timeout=5 ! multipartdemux ! jpegdec ! videoconvert ! ximagesink
However, when I try to build an application to do this, it doesn't work.
My code:
#include <gst/gst.h>
#include <glib.h>
/* Structure to contain all our information, so we can pass it to callbacks */
/* Elements of the MJPEG-over-HTTP pipeline:
 * souphttpsrc -> multipartdemux (~> dynamic pad) jpegdec -> videoconvert -> ximagesink */
typedef struct _CustomData {
GstElement *pipeline; /* top-level pipeline */
GstElement *source; /* souphttpsrc */
GstElement *v_demux; /* multipartdemux - pads appear dynamically */
GstElement *v_decoder; /* jpegdec */
GstElement *v_convert; /* videoconvert */
GstElement *v_sink; /* ximagesink */
} CustomData;
/* Handler for the pad-added signal */
static void pad_added_handler (GstElement *src, GstPad *pad, CustomData *data);
/** Main function */
/* Builds and runs: souphttpsrc -> multipartdemux (~>) jpegdec ->
 * videoconvert -> ximagesink, taking the HTTP URL from argv[1]. */
int main(int argc, char *argv[]) {
  CustomData data;
  GstBus *bus;
  GstMessage *msg;
  GstStateChangeReturn ret;
  gboolean terminate = FALSE;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* BUG FIX: argv[1] was read without checking argc - undefined behavior
   * when the program is started without a URL argument. */
  if (argc < 2) {
    g_printerr ("Usage: %s <mjpeg-http-url>\n", argv[0]);
    return -1;
  }

  /* Create the elements. */
  data.source = gst_element_factory_make ("souphttpsrc", "video_source");
  data.v_demux = gst_element_factory_make ("multipartdemux", "video_demux");
  data.v_decoder = gst_element_factory_make ("jpegdec", "video_decoder");
  data.v_convert = gst_element_factory_make ("videoconvert", "video_convert");
  data.v_sink = gst_element_factory_make ("ximagesink", "video_sink");

  /* Create the empty pipeline */
  data.pipeline = gst_pipeline_new ("new-pipeline");
  if (!data.pipeline || !data.source ||
      !data.v_demux || !data.v_decoder || !data.v_convert || !data.v_sink) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }

  /* Configure the source. */
  g_object_set (G_OBJECT (data.source), "location", argv[1], NULL);
  g_object_set (G_OBJECT (data.source), "timeout", 5, NULL);

  /* Link everything that has "always" pads; demux -> decoder is linked at
   * runtime by pad_added_handler(). */
  gst_bin_add_many (GST_BIN (data.pipeline), data.source,
      data.v_demux, data.v_decoder, data.v_convert, data.v_sink,
      NULL);
  if (gst_element_link_many (data.source, data.v_demux, NULL) != TRUE ||
      gst_element_link_many (data.v_decoder, data.v_convert, data.v_sink, NULL) != TRUE) {
    g_printerr ("Elements could not be linked.\n");
    gst_object_unref (data.pipeline);
    return -1;
  }

  /* Connect to the pad-added signal */
  g_signal_connect (data.v_demux, "pad-added",
      G_CALLBACK (pad_added_handler), &data);

  /* Start playing */
  ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (data.pipeline);
    return -1;
  }

  /* Listen to the bus until error or EOS. */
  bus = gst_element_get_bus (data.pipeline);
  do {
    msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
        GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
    if (msg != NULL) {
      GError *err;
      gchar *debug_info;
      switch (GST_MESSAGE_TYPE (msg)) {
        case GST_MESSAGE_ERROR:
          gst_message_parse_error (msg, &err, &debug_info);
          g_printerr ("Error received from element %s: %s\n",
              GST_OBJECT_NAME (msg->src), err->message);
          g_printerr ("Debugging information: %s\n",
              debug_info ? debug_info : "none");
          g_clear_error (&err);
          g_free (debug_info);
          terminate = TRUE;
          break;
        case GST_MESSAGE_EOS:
          g_print ("End-Of-Stream reached.\n");
          terminate = TRUE;
          break;
        case GST_MESSAGE_STATE_CHANGED:
          /* Only report state changes of the pipeline itself. */
          if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data.pipeline)) {
            GstState old_state, new_state, pending_state;
            gst_message_parse_state_changed (msg, &old_state, &new_state,
                &pending_state);
            g_print ("Pipeline state changed from %s to %s:\n",
                gst_element_state_get_name (old_state),
                gst_element_state_get_name (new_state));
          }
          break;
        default:
          /* Unreachable: only the three types above were requested. */
          g_printerr ("Unexpected message received.\n");
          break;
      }
      gst_message_unref (msg);
    }
  } while (!terminate);

  /* Free resources */
  gst_object_unref (bus);
  gst_element_set_state (data.pipeline, GST_STATE_NULL);
  gst_object_unref (data.pipeline);
  return 0;
}
/* This function will be called by the pad-added signal */
/* "pad-added" handler for multipartdemux: links the demuxer's new video pad
 * to the jpeg decoder's sink pad.
 * Fixes vs. original: (1) new_pad_caps was leaked when the pad was not a
 * video pad (early return before the unref); (2) gst_pad_get_current_caps()
 * can return NULL before negotiation, which would have crashed
 * gst_caps_get_structure() - fall back to gst_pad_query_caps(). */
static void pad_added_handler (GstElement *src, GstPad *new_pad, CustomData *data) {
  GstPad *sink_pad = NULL;
  GstPadLinkReturn ret;
  GstCaps *new_pad_caps = NULL;
  GstStructure *new_pad_struct = NULL;
  const gchar *new_pad_type = NULL;

  g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad), GST_ELEMENT_NAME (src));

  /* Get information of the new pad's type; current caps may not exist yet. */
  new_pad_caps = gst_pad_get_current_caps (new_pad);
  if (new_pad_caps == NULL)
    new_pad_caps = gst_pad_query_caps (new_pad, NULL);
  new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
  new_pad_type = gst_structure_get_name (new_pad_struct);

  /* Only video pads are of interest. */
  if (!g_str_has_prefix (new_pad_type, "video")) {
    g_print (" It has type '%s' -> So exit\n", new_pad_type);
    goto cleanup;
  }

  sink_pad = gst_element_get_static_pad (data->v_decoder, "sink");

  /* If our converter is already linked, we have nothing to do here. */
  if (gst_pad_is_linked (sink_pad)) {
    g_print (" We are already linked. Ignoring.\n");
    goto cleanup;
  }

  ret = gst_pad_link (new_pad, sink_pad);
  if (GST_PAD_LINK_FAILED (ret)) {
    g_print (" Type is '%s' but link failed.\n", new_pad_type);
  } else {
    g_print (" Link succeeded (type '%s').\n", new_pad_type);
  }

cleanup:
  /* Single exit point so caps and pad refs are released on every path. */
  if (new_pad_caps != NULL) {
    gst_caps_unref (new_pad_caps);
  }
  if (sink_pad != NULL) {
    gst_object_unref (sink_pad);
  }
}
The output when I run the program:
Pipeline state changed from NULL to READY:
Pipeline state changed from READY to PAUSED:
Error received from element video_demux: Could not demultiplex stream.
Debugging information: multipartdemux.c(475): multipart_parse_header (): /GstPipeline:new-pipeline/GstMultipartDemux:video_demux:
Boundary not found in the multipart header
Any idea what am I missing?
Thanks in advance.
I have found sometimes that adding queues helps, maybe one before the jpegdec? Also maybe try a jpegparse before the jpegdec.

Snapshot with Gstreamer without EOS

I'm trying to take several snapshots from a source using gstreamer. With the following code, I succeed to take 9 files but with an EOS from the source (that is actually normal, it's cause by the num-buffers argument):
#include <gst/gst.h>
/* Structure to contain all our information, so we can pass it to callbacks */
/* Elements of the snapshot pipeline:
 * videotestsrc -> ffmpegcolorspace -> ffenc_pgm -> multifilesink */
typedef struct _CustomData {
GstElement *pipeline; /* top-level pipeline */
GstElement *source; /* videotestsrc (num-buffers bounds the run) */
GstElement *convert; /* ffmpegcolorspace */
GstElement *sink; /* multifilesink - writes frame%05d.pgm */
GstElement *encode; /* ffenc_pgm */
} CustomData;
int main(int argc, char *argv[]) {
CustomData data;
GstBus *bus;
GstMessage *msg;
GstStateChangeReturn ret;
gboolean terminate = FALSE;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements */
data.source = gst_element_factory_make ("videotestsrc", "source");
data.convert = gst_element_factory_make ("ffmpegcolorspace", "convert");
data.encode = gst_element_factory_make ("ffenc_pgm", "encode");
data.sink = gst_element_factory_make ("multifilesink", "sink");
/* Create the empty pipeline */
data.pipeline = gst_pipeline_new ("test-pipeline");
if (!data.pipeline || !data.source || !data.convert || !data.sink) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Build the pipeline. Note that we are NOT linking the source at this
* point. We will do it later. */
gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.convert , data.encode, data.sink, NULL);
if (!gst_element_link_many (data.source, data.convert, data.encode, data.sink, NULL)) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (data.pipeline);
return -1;
}
/* Modify the source's properties */
g_object_set (data.source, "pattern", 0, NULL);
g_object_set (data.source, "num-buffers", 9, NULL);
g_object_set(data.sink, "location", "frame%05d.pgm", NULL);
/* Start playing */
ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (data.pipeline);
return -1;
}
/* Wait until error or EOS */
bus = gst_element_get_bus (data.pipeline);
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
break;
case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n");
break;
default:
/* We should not reach here because we only asked for ERRORs and EOS */
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (msg);
}
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (data.pipeline, GST_STATE_NULL);
gst_object_unref (data.pipeline);
return 0;
}
But my problem is that I want to continue the live after those 9 snapshots. I look for in the tee and queue capabilities but I'm not able to do anything. I think I have to do a dynamical pipeline with a multifilesink element that I paused and played but how to tell it to do only 9 files ? (max-files=9 doesn't work cause the files generated are overwritten)
Thanks
Sure, you need to add probe to count buffers and remove some elements once you don't need them.
I added few fields to your struct:
int count;
GstPad *blockpad;
GstElement *fakesink;
I created one more sink to replace end of pipeline once we saved 9 snapshots:
data.fakesink = gst_element_factory_make ("fakesink", "fakesink");
I added probe to srcpad of data.convert:
data.count = 0;
data.blockpad = gst_element_get_static_pad (data.convert, "src");
gst_pad_add_probe (data.blockpad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM | GST_PAD_PROBE_TYPE_BUFFER,
pad_probe_cb, &data, NULL);
I used GStreamer 1.x so I replaced ffenc_pgm element with avenc_pgm and ffmpegcolorspace element with identity:
#include <stdio.h>
#include <gst/gst.h>
/* Structure to contain all our information, so we can pass it to callbacks */
/* Snapshot pipeline state shared with the pad probe callback. */
typedef struct _CustomData {
int count; /* buffers seen so far by pad_probe_cb */
GstPad *blockpad; /* src pad of `convert`, where the probe is installed */
GstElement *pipeline; /* top-level pipeline */
GstElement *source; /* videotestsrc */
GstElement *convert; /* identity (placeholder for colorspace conversion) */
GstElement *sink; /* multifilesink - removed after 9 snapshots */
GstElement *fakesink; /* replacement tail once snapshots are done */
GstElement *encode; /* avenc_pgm - removed after 9 snapshots */
} CustomData;
/* Pad probe on convert's src pad: counts buffers and, once more than 9 have
 * passed, dynamically swaps the encode+multifilesink tail for a fakesink so
 * the pipeline keeps running without writing further files.
 * NOTE(review): the buffer that triggers the swap (the 10th) has already
 * entered the probe; confirm it is acceptable for it to flow into the new
 * fakesink branch. */
static GstPadProbeReturn
pad_probe_cb (GstPad * pad, GstPadProbeInfo * info, gpointer user_data) {
CustomData *data = user_data;
data->count++;
printf("%d\n", data->count);
if (data->count > 9)
{
/* Tear down the old tail: set elements to NULL before removing them. */
gst_element_set_state (data->encode, GST_STATE_NULL);
gst_bin_remove (GST_BIN (data->pipeline), data->encode);
gst_element_set_state (data->sink, GST_STATE_NULL);
gst_bin_remove (GST_BIN (data->pipeline), data->sink);
/* Attach the replacement sink and bring it up to PLAYING. */
gst_bin_add (GST_BIN (data->pipeline), data->fakesink);
gst_element_link (data->convert, data->fakesink);
gst_element_set_state (data->fakesink, GST_STATE_PLAYING);
/* Remove the probe so data can flow freely again. */
gst_pad_remove_probe (pad, GST_PAD_PROBE_INFO_ID (info));
return GST_PAD_PROBE_REMOVE;
}
else
return GST_PAD_PROBE_PASS;
}
/* Saves 9 snapshots via multifilesink and then keeps running: a pad probe
 * (pad_probe_cb) swaps the file-writing tail for a fakesink after the 9th
 * buffer, so the remaining num-buffers=20 frames are discarded. */
int main(int argc, char *argv[]) {
  CustomData data;
  GstBus *bus;
  GstMessage *msg;
  GstStateChangeReturn ret;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Create the elements */
  data.source = gst_element_factory_make ("videotestsrc", "source");
  data.convert = gst_element_factory_make ("identity", "convert");
  data.encode = gst_element_factory_make ("avenc_pgm", "encode");
  data.sink = gst_element_factory_make ("multifilesink", "sink");
  data.fakesink = gst_element_factory_make ("fakesink", "fakesink");

  /* Create the empty pipeline */
  data.pipeline = gst_pipeline_new ("test-pipeline");

  /* BUG FIX: data.encode and data.fakesink were missing from this check. */
  if (!data.pipeline || !data.source || !data.convert || !data.encode ||
      !data.sink || !data.fakesink) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }

  /* Build and link the initial chain (fakesink is added later by the probe). */
  gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.convert,
      data.encode, data.sink, NULL);
  if (!gst_element_link_many (data.source, data.convert, data.encode, data.sink, NULL)) {
    g_printerr ("Elements could not be linked.\n");
    gst_object_unref (data.pipeline);
    return -1;
  }

  /* Modify the source's properties. */
  g_object_set (data.source, "pattern", 0, NULL);
  g_object_set (data.source, "num-buffers", 20, NULL);
  g_object_set (data.sink, "location", "frame%05d.pgm", NULL);

  /* Install the counting/blocking probe on convert's src pad. */
  data.count = 0;
  data.blockpad = gst_element_get_static_pad (data.convert, "src");
  gst_pad_add_probe (data.blockpad,
      GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM | GST_PAD_PROBE_TYPE_BUFFER,
      pad_probe_cb, &data, NULL);

  /* Start playing */
  ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (data.pipeline);
    return -1;
  }

  /* Wait until error or EOS */
  bus = gst_element_get_bus (data.pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

  /* Parse message */
  if (msg != NULL) {
    GError *err;
    gchar *debug_info;
    switch (GST_MESSAGE_TYPE (msg)) {
      case GST_MESSAGE_ERROR:
        gst_message_parse_error (msg, &err, &debug_info);
        g_printerr ("Error received from element %s: %s\n",
            GST_OBJECT_NAME (msg->src), err->message);
        g_printerr ("Debugging information: %s\n",
            debug_info ? debug_info : "none");
        g_clear_error (&err);
        g_free (debug_info);
        break;
      case GST_MESSAGE_EOS:
        g_print ("End-Of-Stream reached.\n");
        break;
      default:
        /* Unreachable: only ERROR and EOS were requested. */
        g_printerr ("Unexpected message received.\n");
        break;
    }
    gst_message_unref (msg);
  }

  /* Free resources */
  gst_object_unref (bus);
  gst_element_set_state (data.pipeline, GST_STATE_NULL);
  gst_object_unref (data.pipeline);
  return 0;
}

creating a pipeline to transmit voice

i have the following pipelines that one of them sends voice signals on udp port and the other receives them on the same port number on the receiver side
gst-launch-1.0 -v alsasrc ! audioconvert
! audio/x-raw,channels=2,depth=16,width=16,rate=44100 !
rtpL16pay ! udpsink
host=127.0.0.1 port=5000 //sender
and
gst-launch-1.0 udpsrc port=5000 ! "application/x-rtp,
media=(string)audio, clock-rate=(int)44100,
encoding-name=(string)L16, channels=(int)2,
payload=(int)96" ! rtpL16depay ! audioconvert
! alsasink //receiver
These pipelines work perfectly.
now i am trying to write a source code using Gstreamer SDK that does the same thing. I have come so far:
#include <gst/gst.h>
#include <string.h>
/* Builds one pipeline containing both a sender branch
 *   alsasrc ! audioconvert ! <raw caps> ! rtpL16pay ! udpsink(127.0.0.1:5000)
 * and a receiver branch
 *   udpsrc(5000,rtp caps) ! rtpL16depay ! audioconvert ! audioresample ! autoaudiosink */
int main(int argc, char *argv[]) {
  GstElement *pipeline, *source, *audiosink, *rtppay, *rtpdepay, *filter,
      *conv, *conv1, *udpsink, *udpsrc, *receive_resample;
  GstBus *bus;
  GstMessage *msg;
  GstCaps *filtercaps;
  GstCaps *rtpcaps;
  GstStateChangeReturn ret;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Create the elements. NOTE: the original also created a second
   * capsfilter ("filter1") configured with the RTP caps, but it was never
   * added to the pipeline (the receiver gets its caps through udpsrc's
   * "caps" property), so that dead, leaked element has been removed. */
  source = gst_element_factory_make ("alsasrc", "source");
  conv = gst_element_factory_make ("audioconvert", "conv");
  conv1 = gst_element_factory_make ("audioconvert", "conv1");
  filter = gst_element_factory_make ("capsfilter", "filter");
  rtppay = gst_element_factory_make ("rtpL16pay", "rtppay");
  rtpdepay = gst_element_factory_make ("rtpL16depay", "rtpdepay");
  udpsink = gst_element_factory_make ("udpsink", "udpsink");
  audiosink = gst_element_factory_make ("autoaudiosink", "audiosink");
  receive_resample = gst_element_factory_make ("audioresample", NULL);
  udpsrc = gst_element_factory_make ("udpsrc", NULL);

  /* Create the empty pipeline */
  pipeline = gst_pipeline_new ("test-pipeline");

  /* BUG FIX: the original check skipped conv1, rtpdepay, udpsrc,
   * receive_resample and audiosink. */
  if (!pipeline || !source || !filter || !conv || !conv1 || !rtppay ||
      !rtpdepay || !udpsink || !udpsrc || !receive_resample || !audiosink) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }

  /* Receiver configuration. */
  g_object_set (udpsrc, "port", 5000, NULL);
  rtpcaps = gst_caps_from_string ("application/x-rtp,media=audio,payload=96,clock-rate=44100,encoding-name=L16,channels=2");
  g_object_set (G_OBJECT (udpsrc), "caps", rtpcaps, NULL);
  gst_caps_unref (rtpcaps); /* BUG FIX: this caps ref was leaked */

  /* Sender configuration. */
  g_object_set (G_OBJECT (udpsink), "host", "127.0.0.1", NULL);
  g_object_set (G_OBJECT (udpsink), "port", 5000, NULL);

  /* NOTE(review): "width"/"depth" fields on audio/x-raw are GStreamer 0.10
   * style; GStreamer 1.0 expects e.g. format=S16BE instead - confirm which
   * version this targets. */
  filtercaps = gst_caps_new_simple ("audio/x-raw",
      "channels", G_TYPE_INT, 2,
      "width", G_TYPE_INT, 16,
      "depth", G_TYPE_INT, 16,
      "rate", G_TYPE_INT, 44100,
      NULL);
  g_object_set (G_OBJECT (filter), "caps", filtercaps, NULL);
  gst_caps_unref (filtercaps);

  /* BUG FIX: the working command line is
   *   alsasrc ! audioconvert ! <caps> ! rtpL16pay
   * but the code linked the capsfilter BEFORE audioconvert; alsasrc cannot
   * necessarily produce those caps directly, so conv and filter are swapped
   * here to match the command line. */
  gst_bin_add_many (GST_BIN (pipeline), source, conv, filter, rtppay, udpsink, NULL);
  if (gst_element_link_many (source, conv, filter, rtppay, udpsink, NULL) != TRUE) {
    g_printerr ("Elements could not be linked.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  /* Receiver branch. */
  gst_bin_add_many (GST_BIN (pipeline), udpsrc, rtpdepay, conv1, receive_resample, audiosink, NULL);
  if (gst_element_link_many (udpsrc, rtpdepay, conv1, receive_resample, audiosink, NULL) != TRUE) {
    g_printerr ("Elements could not be linked.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  /* Start playing */
  ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  /* Wait until error or EOS */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

  /* Parse message */
  if (msg != NULL) {
    GError *err;
    gchar *debug_info;
    switch (GST_MESSAGE_TYPE (msg)) {
      case GST_MESSAGE_ERROR:
        gst_message_parse_error (msg, &err, &debug_info);
        g_printerr ("Error received from element %s: %s\n",
            GST_OBJECT_NAME (msg->src), err->message);
        g_printerr ("Debugging information: %s\n",
            debug_info ? debug_info : "none");
        g_clear_error (&err);
        g_free (debug_info);
        break;
      case GST_MESSAGE_EOS:
        g_print ("End-Of-Stream reached.\n");
        break;
      default:
        /* Unreachable: only ERROR and EOS were requested. */
        g_printerr ("Unexpected message received.\n");
        break;
    }
    gst_message_unref (msg);
  }

  /* Free resources */
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}
but somehow i dont receive any voice on the receiver. i dont get any errors of any kind. Any ideas why this is happening?

display image using gstreamer with c API

i try to do a gstreamer pipeline using c API to show image for this i use this gst-launch command
gst-launch filesrc location="pluto.jpg" ! jpegdec ! ffmpegcolorspace ! videobalance saturation=0 ! freeze ! ximagesink
When I run it from the shell it works fine, but when I convert it to C code it doesn't work. Can someone help me, please?
#include <gst/gst.h>
/* Displays a still JPEG image:
 * filesrc ! jpegdec ! ffmpegcolorspace ! imagefreeze ! ximagesink */
int main(int argc, char *argv[]) {
  GstElement *pipeline, *jpdec, *imgf, *cod, *source, *sink;
  GstBus *bus;
  GstMessage *msg;
  GstStateChangeReturn ret;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Create the elements */
  source = gst_element_factory_make ("filesrc", "source");
  sink = gst_element_factory_make ("ximagesink", "sink");
  jpdec = gst_element_factory_make ("jpegdec", "jdec");
  imgf = gst_element_factory_make ("imagefreeze", "freeze");
  cod = gst_element_factory_make ("ffmpegcolorspace", "ffmdec");

  /* Create the empty pipeline */
  pipeline = gst_pipeline_new ("test-pipeline");
  if (!pipeline || !source || !sink || !jpdec || !imgf || !cod) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }

  /* Configure the source before starting. */
  g_object_set (G_OBJECT (source), "location", "pluto.jpg", NULL);

  /* BUG FIX: the original called gst_element_link (source, sink), which
   * tried to connect filesrc directly to ximagesink and left the whole
   * decode chain unlinked - hence the "output image buffer of 0x0 pixels"
   * failure in the sink. Link the complete chain instead. */
  gst_bin_add_many (GST_BIN (pipeline), source, jpdec, cod, imgf, sink, NULL);
  if (gst_element_link_many (source, jpdec, cod, imgf, sink, NULL) != TRUE) {
    g_printerr ("Elements could not be linked.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  /* Start playing */
  ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  /* Wait until error or EOS */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

  /* Parse message */
  if (msg != NULL) {
    GError *err;
    gchar *debug_info;
    switch (GST_MESSAGE_TYPE (msg)) {
      case GST_MESSAGE_ERROR:
        gst_message_parse_error (msg, &err, &debug_info);
        g_printerr ("Error received from element %s: %s\n",
            GST_OBJECT_NAME (msg->src), err->message);
        g_printerr ("Debugging information: %s\n",
            debug_info ? debug_info : "none");
        g_clear_error (&err);
        g_free (debug_info);
        break;
      case GST_MESSAGE_EOS:
        g_print ("End-Of-Stream reached.\n");
        break;
      default:
        /* Unreachable: only ERROR and EOS were requested. */
        g_printerr ("Unexpected message received.\n");
        break;
    }
    gst_message_unref (msg);
  }

  /* Free resources */
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}
there is the c code that i use to play image
When I compile the code there are no errors, but when I run it I get this error:
(test:5355): GStreamer-CRITICAL **: gst_caps_get_structure: assertion `GST_IS_CAPS (caps)' failed
(test:5355): GStreamer-CRITICAL **: gst_structure_get_int: assertion `structure != NULL' failed
Error received from element sink: Failed to create output image buffer of 0x0 pixels
Debugging information: ximagesink.c(472): gst_ximagesink_ximage_new (): /GstPipeline:test-pipeline/GstXImageSink:sink:
could not get shared memory of 0 bytes
Your gst_element_link is wrong. Something like:
if (gst_element_link_many (source, jpdec, cod, imgf, sink, NULL) != TRUE)
should work.
Those errors are likely a bug in xvimagesink, but you are using it wrongly. Feel free to report a bug at bugzilla.gnome.org about these assertions in case they happen with 1.0.

Resources