display image using gstreamer with c API - c

I am trying to build a GStreamer pipeline with the C API to display an image. It is based on this gst-launch command:
gst-launch filesrc location="pluto.jpg" ! jpegdec ! ffmpegcolorspace ! videobalance saturation=0 ! freeze ! ximagesink
When I run the command it works fine, but my attempt to convert it to C code does not work. Can someone help me, please?
#include <gst/gst.h>
int main(int argc, char *argv[]) {
  GstElement *pipeline, *jpdec, *imgf, *cod, *source, *sink;
  GstBus *bus;
  GstMessage *msg;
  GstStateChangeReturn ret;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Create the elements for:
   * filesrc ! jpegdec ! ffmpegcolorspace ! imagefreeze ! ximagesink */
  source = gst_element_factory_make ("filesrc", "source");
  sink = gst_element_factory_make ("ximagesink", "sink");
  jpdec = gst_element_factory_make ("jpegdec", "jdec");
  imgf = gst_element_factory_make ("imagefreeze", "freeze");
  cod = gst_element_factory_make ("ffmpegcolorspace", "ffmdec");

  /* Create the empty pipeline */
  pipeline = gst_pipeline_new ("test-pipeline");
  if (!pipeline || !source || !sink || !jpdec || !imgf || !cod) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }

  /* Build the pipeline.
   * BUG FIX: the original code called gst_element_link (source, sink), which
   * linked filesrc straight to ximagesink and left jpegdec, the colorspace
   * converter and imagefreeze dangling.  ximagesink then received undecoded
   * JPEG bytes and failed with "output image buffer of 0x0 pixels".  Link
   * the full chain in the same order as the gst-launch command instead. */
  gst_bin_add_many (GST_BIN (pipeline), source, jpdec, cod, imgf, sink, NULL);
  if (gst_element_link_many (source, jpdec, cod, imgf, sink, NULL) != TRUE) {
    g_printerr ("Elements could not be linked.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  /* Point filesrc at the image to display */
  g_object_set (G_OBJECT (source), "location", "pluto.jpg", NULL);

  /* Start playing */
  ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  /* Block until an error or EOS message arrives on the bus */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

  /* Parse message */
  if (msg != NULL) {
    GError *err;
    gchar *debug_info;
    switch (GST_MESSAGE_TYPE (msg)) {
      case GST_MESSAGE_ERROR:
        gst_message_parse_error (msg, &err, &debug_info);
        g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
        g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
        g_clear_error (&err);
        g_free (debug_info);
        break;
      case GST_MESSAGE_EOS:
        g_print ("End-Of-Stream reached.\n");
        break;
      default:
        /* We should not reach here because we only asked for ERRORs and EOS */
        g_printerr ("Unexpected message received.\n");
        break;
    }
    gst_message_unref (msg);
  }

  /* Free resources */
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}
That is the C code I use to display the image.
The code compiles without errors, but when I run it I get this error:
(test:5355): GStreamer-CRITICAL **: gst_caps_get_structure: assertion `GST_IS_CAPS (caps)' failed
(test:5355): GStreamer-CRITICAL **: gst_structure_get_int: assertion `structure != NULL' failed
Error received from element sink: Failed to create output image buffer of 0x0 pixels
Debugging information: ximagesink.c(472): gst_ximagesink_ximage_new (): /GstPipeline:test-pipeline/GstXImageSink:sink:
could not get shared memory of 0 bytes

Your gst_element_link is wrong. Something like:
if (gst_element_link_many (source, jpdec, cod, imgf, sink, NULL) != TRUE)
should work.
Those errors are likely a bug in ximagesink, but you are using it wrongly. Feel free to report a bug at bugzilla.gnome.org about these assertions in case they happen with 1.0.

Related

Gstreamer x264enc invalid buffer size c

I've been trying to use this gstreamer c code(my system is running on ubuntu 20.04 with GStreamer 1.16.2 and gcc 9.4.0):
#include <gst/gst.h>
#include <glib.h>
#include <stdio.h>
// gst-launch-1.0 videotestsrc pattern=ball ! 'video/x-raw, format=(string)I420, width=(int)1920, height=(int)1080, framerate=(fraction)30/1' ! \
// ! queue ! nvvideoconvert ! nvv4l2h264enc bitrate=1000000 ! rtph264pay ! udpsink host=192.168.0.1 port=5000
int
main (int argc, char *argv[])
{
GstElement *pipeline, *source, *filter, *queue, *converter, *encoder, *payer, *sink;
GstBus *bus;
GstMessage *msg;
GstCaps *filtercaps;
GstStateChangeReturn ret;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements */
source = gst_element_factory_make ("filesrc", "source");
filter = gst_element_factory_make ("capsfilter","filter");
queue = gst_element_factory_make ("queue","queue");
converter = gst_element_factory_make ("videoconvert","converter");
encoder = gst_element_factory_make ("x264enc","encoder");
payer = gst_element_factory_make ("rtph264pay","payer");
sink = gst_element_factory_make ("udpsink", "sink");
/* Create the empty pipeline */
pipeline = gst_pipeline_new ("maxim-pipeline");
if (!pipeline || !source || !filter || !queue || !converter || !encoder || !payer || !sink) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Build the pipeline */
gst_bin_add_many (GST_BIN (pipeline), source, filter, queue, converter, encoder, payer, sink, NULL);
if (!gst_element_link_many (source,filter,queue,NULL)){
g_printerr ("Source->filter->queue problem\n");
gst_object_unref (pipeline);
return -1;
}
if (!gst_element_link_many (queue,converter,encoder,NULL)){
g_printerr ("Queue->converter->encoder problem\n");
gst_object_unref (pipeline);
return -1;
}
if (!gst_element_link_many(encoder,payer,sink,NULL)){
g_printerr ("Encoder->payer->sink problem\n");
gst_object_unref (pipeline);
return -1;
}
/* Modify the properties */
g_object_set (source, "location", "/home/thmsd/Videos/test.mkv", NULL);
g_object_set (encoder, "bitrate", 2000000, NULL);
g_object_set (sink, "host","192.168.0.1", NULL);
g_object_set (sink, "port",5000, NULL);
g_object_set (sink, "sync", "FALSE", NULL);
filtercaps = gst_caps_new_simple ("video/x-raw",
"format",G_TYPE_STRING,"I420",
"width", G_TYPE_INT, 1920,
"height", G_TYPE_INT, 1080,
"framerate",GST_TYPE_FRACTION,30,1,
NULL);
g_object_set (filter, "caps", filtercaps, NULL);
gst_caps_unref (filtercaps);
/* Start playing */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (pipeline);
return -1;
}
/* Wait until error or EOS */
bus = gst_element_get_bus (pipeline);
msg =
gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n",
GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n",
debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
break;
case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n");
break;
default:
/* We should not reach here because we only asked for ERRORs and EOS */
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (msg);
}
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
return 0;
}
But the following error happens, which actually refers to x264enc. By the way, this code has been edited: at first it was Jetson-compatible and used nvv4l2h264enc:
The test video that I'm trying to stream has the following properties:
First note that gstreamer booleans are not typed with capitals, so use true and false or just use 0 and 1.
x264enc may not be that fast on Jetson. You may try the following :
#include <gst/gst.h>
/* Build a transcoding pipeline from a launch description and run it on a
 * GLib main loop.  Equivalent to running the gst-launch-1.0 line below,
 * but as a standalone program. */
int main (gint argc, gchar * argv[])
{
  gst_init (&argc, &argv);

  GMainLoop *main_loop = g_main_loop_new (NULL, FALSE);
  GError *parse_err = NULL;

  /* Hardware-accelerated path (NVIDIA decoder + encoder). */
  const gchar *launch_desc = "filesrc location=/home/nvidia/Videos/bbb_sunflower_1080p_60fps_normal.mkv ! matroskademux ! parsebin ! nvv4l2decoder ! nvv4l2h264enc bitrate=20000000 insert-sps-pps=1 insert-vui=1 idrinterval=15 ! h264parse ! rtph264pay ! udpsink host=127.0.0.1 port=5004";
  /* Software alternative (x264enc may be much slower):
   * "filesrc location=/home/nvidia/Videos/bbb_sunflower_1080p_60fps_normal.mkv ! matroskademux ! parsebin ! nvv4l2decoder ! nvvidconv ! video/x-raw ! x264enc bitrate=20000 tune=zerolatency insert-vui=1 ! h264parse ! rtph264pay ! udpsink host=127.0.0.1 port=5004"
   */

  /* Parse the description into a ready-made pipeline. */
  GstElement *transcode_pipeline = gst_parse_launch (launch_desc, &parse_err);
  if (parse_err != NULL || transcode_pipeline == NULL) {
    g_error ("Failed to create pipeline\n");
    exit(-1);
  }

  /* Log property changes to the terminal; remove later to make it quiet. */
  g_signal_connect(transcode_pipeline, "deep-notify", G_CALLBACK(gst_object_default_deep_notify), NULL);

  /* Bring the pipeline up: READY first, then PLAYING. */
  gst_element_set_state (transcode_pipeline, GST_STATE_READY);
  gst_element_set_state (transcode_pipeline, GST_STATE_PLAYING);

  /* Block until the state change completes (or fails). */
  if (gst_element_get_state (transcode_pipeline, NULL, NULL, -1) == GST_STATE_CHANGE_FAILURE) {
    g_error ("Failed to go into PLAYING state");
    exit(-2);
  }

  /* You may have to further manage bus for EOS... */
  g_print ("Running ...\n");
  g_main_loop_run (main_loop);
  return 0;
}
This is what I've successfully tested on Jetson (AGX Xavier running L4T R32.6.1).
You would adapt to your file source and receiver address/port, and save as test_transcode_MKV_to_RTPH264.c then build with:
gcc -Wall -o test_transcode_MKV_to_RTPH264 test_transcode_MKV_to_RTPH264.c `pkg-config --cflags --libs gstreamer-1.0 gobject-2.0 glib-2.0`
and test streaming:
./test_transcode_MKV_to_RTPH264
Then if receiver has gstreamer installed, you should be able to display with something like:
gst-launch-1.0 udpsrc port=5004 ! application/x-rtp,encoding-name=H264 ! rtpjitterbuffer latency=300 ! rtph264depay ! decodebin ! autovideosink
For receiving RTPH264 with FFMPEG or VLC, you may have to create a SDP file.
EDIT: for non-NVIDIA case, you may try:
filesrc location=test_h265.mkv ! matroskademux ! h265parse ! avdec_h265 ! videoconvert ! x264enc bitrate=20000 tune=zerolatency insert-vui=1 key-int-max=30 ! h264parse ! rtph264pay ! udpsink host=127.0.0.1 port=5004

rtspsrc internal data stream error gstreamer c-appliaction

gst-launch-1.0 rtspsrc location=rtsp://192.168.1.43:554/stream0 latency=0 name=src src. ! rtph264depay ! queue ! h264parse ! vpudec ! videoconvert ! videoscale ! waylandsink window-width=352 window-height=288
I'm trying to write the pipeline as a C application. The command above runs successfully, but the C application gives the error "Error received from element udpsrc1: Internal data stream error."
Here's my c code:
#include <gst/gst.h>
#include <string.h>
#include <stdlib.h>
#include <stdio.h>
/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _Data {
GstElement *pipeline;
GstElement *source;
GstElement *videoQueue;
GstElement *videoParser;
GstElement *videoDepayloader;
GstElement *videoDecoder;
GstElement *videoSink;
GstElement *videoConvert;
GstElement *videoScale;
} Data;
int main(int argc, char *argv[]) {
Data data;
GstBus *bus;
GstMessage *msg;
GstPad *pad;
gboolean terminate = FALSE;
GstStateChangeReturn ret;
/* Initialize GStreamer */
gst_init (NULL,NULL);
data.source = gst_element_factory_make ("rtspsrc", "source");
data.videoQueue = gst_element_factory_make ("queue", "videoQueue");
data.videoDepayloader = gst_element_factory_make ("rtph264depay", "videoDepayloader");
data.videoDecoder = gst_element_factory_make ("vpudec", "videoDecoder");
data.videoSink = gst_element_factory_make ("waylandsink", "videoSink");
data.videoParser = gst_element_factory_make("h264parse", "videoParser");
data.videoConvert = gst_element_factory_make("videoconvert", "video-convert");
data.videoScale = gst_element_factory_make("videoscale", "videoScale");
/* Create the empty pipeline */
data.pipeline = gst_pipeline_new ("rtsp-pipeline");
if (!data.source) {
g_printerr ("source elements could be created.\n");
return -1;
}
if(!data.videoQueue){
g_printerr ("videoQueue elements could be created.\n");
return -1;
}
if(!data.videoDepayloader){
g_printerr ("videoDepayloader elements could be created.\n");
return -1;
}
if(!data.videoDecoder){
g_printerr ("videoDecoder elements could be created.\n");
return -1;
}
if(!data.videoSink){
g_printerr ("videoSink elements could be created.\n");
return -1;
}
if(!data.videoParser){
g_printerr ("videoParser elements could be created.\n");
return -1;
}
if(!data.videoConvert){
g_printerr ("videoConvert elements could be created.\n");
return -1;
}
if(!data.videoScale){
g_printerr ("videoScale elements could be created.\n");
return -1;
}
/* Configure elements */
g_object_set(data.source, "location", "rtsp://192.168.1.43/h264cif", NULL);
g_object_set(data.source, "latency", 0, NULL);
gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.videoDepayloader, data.videoQueue, data.videoParser, data.videoDecoder, data.videoConvert, data.videoScale, data.videoSink, NULL);
//GST_DEBUG=4 gst-launch-1.0 rtspsrc location=rtsp://192.168.1.43:554/stream0 latency=0 name=src src. ! rtph264depay ! queue ! h264parse ! vpudec ! videoconvert ! videoscale ! waylandsink window-width=352 window-height=288
if (!(gst_element_link_many(data.videoDepayloader, data.videoQueue,
data.videoParser, data.videoDecoder,
data.videoScale, data.videoConvert, data.videoSink, NULL)))
{
g_printerr("Error linking fields... \n");
exit (-1);
}
/* Start playing */
ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_print ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (data.pipeline);
exit(1);
}
/* Listen to the bus */
bus = gst_element_get_bus (data.pipeline);
do {
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
terminate = TRUE;
break;
case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n");
terminate = TRUE;
break;
case GST_MESSAGE_STATE_CHANGED:
/* We are only interested in state-changed messages from the pipeline */
if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data.pipeline)) {
GstState old_state, new_state, pending_state;
gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
g_print ("Pipeline state changed from %s to %s:\n",
gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));
}
break;
default:
/* We should not reach here */
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (msg);
}
} while (!terminate);
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (data.pipeline, GST_STATE_NULL);
gst_object_unref (data.pipeline);
return 0;
}
It seems like you did not link your source element:
gst_element_link_many(data.source, data.videoDepayloader, /* ... */ ,NULL)
And be sure to link the elements in the same order as your command line pipeline.

Gstreamer pipeline works with gst-launch but not in code. Reproducing a mjpeg stream from a IP camera

I want to reproduce a mjpeg stream from a intercom (but it's equivalent to a IP camera). Using gst-launch in the console works fine:
gst-launch-1.0 souphttpsrc location="http://192.168.1.191/api/camera/snapshot?width=640&height=480&fps=10" timeout=5 ! multipartdemux ! jpegdec ! videoconvert ! ximagesink
However, when I try to build an application to do this, it doesn't work.
My code:
#include <gst/gst.h>
#include <glib.h>
/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
GstElement *pipeline;  /* top-level bin holding the elements below */
GstElement *source;    /* souphttpsrc: HTTP source for the MJPEG stream */
GstElement *v_demux;   /* multipartdemux: has "sometimes" pads, linked via pad-added */
GstElement *v_decoder; /* jpegdec: decodes individual JPEG frames */
GstElement *v_convert; /* videoconvert: colorspace conversion for the sink */
GstElement *v_sink;    /* ximagesink: X11 video output */
} CustomData;
/* Handler for the pad-added signal; links the demuxer's dynamic pad to the decoder */
static void pad_added_handler (GstElement *src, GstPad *pad, CustomData *data);
/** Main function */
int main(int argc, char *argv[]) {
CustomData data;
GstBus *bus;
GstMessage *msg;
GstStateChangeReturn ret;
gboolean terminate = FALSE;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements
*
* souphttpsrc -> multipartdemux (~>) jpegdec -> videoconvert -> ximagesink
*
* ~> Sometimes pad
*
* */
data.source = gst_element_factory_make ("souphttpsrc", "video_source");
data.v_demux = gst_element_factory_make ("multipartdemux", "video_demux");
data.v_decoder = gst_element_factory_make ("jpegdec", "video_decoder");
data.v_convert = gst_element_factory_make ("videoconvert", "video_convert");
data.v_sink = gst_element_factory_make ("ximagesink", "video_sink");
/* Create the empty pipeline */
data.pipeline = gst_pipeline_new ("new-pipeline");
if (!data.pipeline || !data.source ||
!data.v_demux || !data.v_decoder || !data.v_convert || !data.v_sink ) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Configure elements */
g_object_set(G_OBJECT(data.source), "location", argv[1], NULL);
g_object_set(G_OBJECT(data.source), "timeout", 5, NULL);
/* Link all elements that can be automatically linked because they have "Always" pads */
gst_bin_add_many (GST_BIN (data.pipeline), data.source,
data.v_demux, data.v_decoder, data.v_convert, data.v_sink,
NULL);
if (gst_element_link_many (data.source, data.v_demux, NULL) != TRUE ||
gst_element_link_many (data.v_decoder, data.v_convert, data.v_sink, NULL) != TRUE ) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (data.pipeline);
return -1;
}
/* Connect to the pad-added signal */
g_signal_connect (data.v_demux, "pad-added", G_CALLBACK (pad_added_handler), &data);
/* Start playing */
ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (data.pipeline);
return -1;
}
/* Listen to the bus */
bus = gst_element_get_bus (data.pipeline);
do {
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
terminate = TRUE;
break;
case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n");
terminate = TRUE;
break;
case GST_MESSAGE_STATE_CHANGED:
/* We are only interested in state-changed messages from the pipeline */
if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data.pipeline)) {
GstState old_state, new_state, pending_state;
gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
g_print ("Pipeline state changed from %s to %s:\n",
gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));
}
break;
default:
/* We should not reach here */
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (msg);
}
} while (!terminate);
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (data.pipeline, GST_STATE_NULL);
gst_object_unref (data.pipeline);
return 0;
}
/* This function will be called by the pad-added signal */
/* This function will be called by the pad-added signal.
 * Links the demuxer's new src pad to the JPEG decoder's sink pad when the
 * pad carries video; all exit paths release the caps and pad references. */
static void pad_added_handler (GstElement *src, GstPad *new_pad, CustomData *data) {
  GstPad *sink_pad = NULL;
  GstPadLinkReturn ret;
  GstCaps *new_pad_caps = NULL;
  GstStructure *new_pad_struct = NULL;
  const gchar *new_pad_type = NULL;

  g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad), GST_ELEMENT_NAME (src));

  /* Get information of the new pad's type.
   * BUG FIX: gst_pad_get_current_caps can return NULL; the original passed
   * the result straight to gst_caps_get_structure. */
  new_pad_caps = gst_pad_get_current_caps (new_pad);
  if (new_pad_caps == NULL) {
    g_print (" New pad has no caps yet -> So exit\n");
    goto exit;
  }
  new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
  new_pad_type = gst_structure_get_name (new_pad_struct);

  /* Get pad from the correspondent converter */
  if (g_str_has_prefix (new_pad_type, "video")) {
    sink_pad = gst_element_get_static_pad (data->v_decoder, "sink");
  } else {
    g_print (" It has type '%s' -> So exit\n", new_pad_type);
    goto exit;   /* BUG FIX: this early return leaked new_pad_caps */
  }

  /* If our converter is already linked, we have nothing to do here */
  if (gst_pad_is_linked (sink_pad)) {
    g_print (" We are already linked. Ignoring.\n");
    goto exit;   /* BUG FIX: this early return leaked new_pad_caps too */
  }

  ret = gst_pad_link (new_pad, sink_pad);
  if (GST_PAD_LINK_FAILED (ret)) {
    g_print (" Type is '%s' but link failed.\n", new_pad_type);
  } else {
    g_print (" Link succeeded (type '%s').\n", new_pad_type);
  }

exit:
  /* Unreference the new pad's caps, if we got them */
  if (new_pad_caps != NULL) {
    gst_caps_unref (new_pad_caps);
  }
  /* Unreference the sink pad */
  if (sink_pad != NULL) {
    gst_object_unref (sink_pad);
  }
}
The output when I run the program:
Pipeline state changed from NULL to READY:
Pipeline state changed from READY to PAUSED:
Error received from element video_demux: Could not demultiplex stream.
Debugging information: multipartdemux.c(475): multipart_parse_header (): /GstPipeline:new-pipeline/GstMultipartDemux:video_demux:
Boundary not found in the multipart header
Any idea what am I missing?
Thanks in advance.
I have found sometimes that adding queues helps, maybe one before the jpegdec? Also maybe try a jpegparse before the jpegdec.

Snapshot with Gstreamer without EOS

I'm trying to take several snapshots from a source using gstreamer. With the following code, I succeed to take 9 files but with an EOS from the source (that is actually normal, it's cause by the num-buffers argument):
#include <gst/gst.h>
/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
GstElement *pipeline; /* top-level bin: source -> convert -> encode -> sink */
GstElement *source;   /* videotestsrc, limited by num-buffers */
GstElement *convert;  /* ffmpegcolorspace (GStreamer 0.10 colorspace converter) */
GstElement *sink;     /* multifilesink writing frame%05d.pgm */
GstElement *encode;   /* ffenc_pgm: encodes raw frames to PGM */
} CustomData;
int main(int argc, char *argv[]) {
  CustomData data;
  GstBus *bus;
  GstMessage *msg;
  GstStateChangeReturn ret;
  gboolean terminate = FALSE;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Create the elements:
   * videotestsrc ! ffmpegcolorspace ! ffenc_pgm ! multifilesink */
  data.source = gst_element_factory_make ("videotestsrc", "source");
  data.convert = gst_element_factory_make ("ffmpegcolorspace", "convert");
  data.encode = gst_element_factory_make ("ffenc_pgm", "encode");
  data.sink = gst_element_factory_make ("multifilesink", "sink");

  /* Create the empty pipeline.
   * BUG FIX: data.encode was created but missing from this NULL check, so a
   * failed factory lookup would crash later in gst_bin_add_many. */
  data.pipeline = gst_pipeline_new ("test-pipeline");
  if (!data.pipeline || !data.source || !data.convert || !data.encode || !data.sink) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }

  /* Build the pipeline. Note that we are NOT linking the source at this
   * point. We will do it later. */
  gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.convert , data.encode, data.sink, NULL);
  if (!gst_element_link_many (data.source, data.convert, data.encode, data.sink, NULL)) {
    g_printerr ("Elements could not be linked.\n");
    gst_object_unref (data.pipeline);
    return -1;
  }

  /* Modify the source's properties: pattern 0 = SMPTE bars; num-buffers
   * makes the source send EOS after 9 frames, producing 9 snapshot files */
  g_object_set (data.source, "pattern", 0, NULL);
  g_object_set (data.source, "num-buffers", 9, NULL);
  g_object_set(data.sink, "location", "frame%05d.pgm", NULL);

  /* Start playing */
  ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (data.pipeline);
    return -1;
  }

  /* Wait until error or EOS */
  bus = gst_element_get_bus (data.pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

  /* Parse message */
  if (msg != NULL) {
    GError *err;
    gchar *debug_info;
    switch (GST_MESSAGE_TYPE (msg)) {
      case GST_MESSAGE_ERROR:
        gst_message_parse_error (msg, &err, &debug_info);
        g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
        g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
        g_clear_error (&err);
        g_free (debug_info);
        break;
      case GST_MESSAGE_EOS:
        g_print ("End-Of-Stream reached.\n");
        break;
      default:
        /* We should not reach here because we only asked for ERRORs and EOS */
        g_printerr ("Unexpected message received.\n");
        break;
    }
    gst_message_unref (msg);
  }

  /* Free resources */
  gst_object_unref (bus);
  gst_element_set_state (data.pipeline, GST_STATE_NULL);
  gst_object_unref (data.pipeline);
  return 0;
}
But my problem is that I want the live stream to continue after those 9 snapshots. I looked into the tee and queue capabilities but could not get anything to work. I think I need a dynamic pipeline with a multifilesink element that I pause and resume, but how do I tell it to create only 9 files? (max-files=9 doesn't work because the generated files get overwritten)
Thanks
Sure, you need to add probe to count buffers and remove some elements once you don't need them.
I added few fields to your struct:
int count;
GstPad *blockpad;
GstElement *fakesink;
I created one more sink to replace end of pipeline once we saved 9 snapshots:
data.fakesink = gst_element_factory_make ("fakesink", "fakesink");
I added probe to srcpad of data.convert:
data.count = 0;
data.blockpad = gst_element_get_static_pad (data.convert, "src");
gst_pad_add_probe (data.blockpad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM | GST_PAD_PROBE_TYPE_BUFFER,
pad_probe_cb, &data, NULL);
I used GStreamer 1.x so I replaced ffenc_pgm element with avenc_pgm and ffmpegcolorspace element with identity:
#include <stdio.h>
#include <gst/gst.h>
/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
int count;             /* buffers seen so far by the pad probe */
GstPad *blockpad;      /* src pad of convert, where the probe is installed */
GstElement *pipeline;  /* top-level bin */
GstElement *source;    /* videotestsrc, limited by num-buffers */
GstElement *convert;   /* identity (1.x replacement for ffmpegcolorspace) */
GstElement *sink;      /* multifilesink writing frame%05d.pgm */
GstElement *fakesink;  /* replaces encode+sink after 9 snapshots */
GstElement *encode;    /* avenc_pgm (1.x replacement for ffenc_pgm) */
} CustomData;
/* Pad probe on convert's src pad: counts buffers and, once more than 9 have
 * passed, swaps the file-writing branch (encode -> multifilesink) for a
 * fakesink so the pipeline keeps running without producing more files. */
static GstPadProbeReturn
pad_probe_cb (GstPad * pad, GstPadProbeInfo * info, gpointer user_data) {
CustomData *data = user_data;
data->count++;
printf("%d\n", data->count);
if (data->count > 9)
{
/* Shut down and detach the file-writing branch.  Each element is set to
 * NULL before gst_bin_remove, which drops the bin's reference. */
gst_element_set_state (data->encode, GST_STATE_NULL);
gst_bin_remove (GST_BIN (data->pipeline), data->encode);
gst_element_set_state (data->sink, GST_STATE_NULL);
gst_bin_remove (GST_BIN (data->pipeline), data->sink);
/* Re-terminate the pipeline with the prepared fakesink and start it. */
gst_bin_add (GST_BIN (data->pipeline), data->fakesink);
gst_element_link (data->convert, data->fakesink);
gst_element_set_state (data->fakesink, GST_STATE_PLAYING);
/* Counting is done; the probe is no longer needed. */
gst_pad_remove_probe (pad, GST_PAD_PROBE_INFO_ID (info));
return GST_PAD_PROBE_REMOVE;
}
else
return GST_PAD_PROBE_PASS;
}
int main(int argc, char *argv[]) {
  CustomData data;
  GstBus *bus;
  GstMessage *msg;
  GstStateChangeReturn ret;
  gboolean terminate = FALSE;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Create the elements (GStreamer 1.x equivalents of the 0.10 originals) */
  data.source = gst_element_factory_make ("videotestsrc", "source");
  data.convert = gst_element_factory_make ("identity", "convert");
  data.encode = gst_element_factory_make ("avenc_pgm", "encode");
  data.sink = gst_element_factory_make ("multifilesink", "sink");
  data.fakesink = gst_element_factory_make ("fakesink", "fakesink");

  /* Create the empty pipeline.
   * BUG FIX: data.encode and data.fakesink were missing from this NULL
   * check, so a failed factory lookup would crash later. */
  data.pipeline = gst_pipeline_new ("test-pipeline");
  if (!data.pipeline || !data.source || !data.convert || !data.encode ||
      !data.sink || !data.fakesink) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }

  /* Build the pipeline. Note that we are NOT linking the source at this
   * point. We will do it later. */
  gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.convert , data.encode, data.sink, NULL);
  if (!gst_element_link_many (data.source, data.convert, data.encode, data.sink, NULL)) {
    g_printerr ("Elements could not be linked.\n");
    gst_object_unref (data.pipeline);
    return -1;
  }

  /* Modify the source's properties: 20 buffers total, of which only the
   * first 9 become files (the probe swaps the branch after that) */
  g_object_set (data.source, "pattern", 0, NULL);
  g_object_set (data.source, "num-buffers", 20, NULL);
  g_object_set (data.sink, "location", "frame%05d.pgm", NULL);

  /* Install the counting/blocking probe on convert's src pad */
  data.count = 0;
  data.blockpad = gst_element_get_static_pad (data.convert, "src");
  gst_pad_add_probe (data.blockpad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM | GST_PAD_PROBE_TYPE_BUFFER,
      pad_probe_cb, &data, NULL);

  /* Start playing */
  ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (data.pipeline);
    return -1;
  }

  /* Wait until error or EOS */
  bus = gst_element_get_bus (data.pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

  /* Parse message */
  if (msg != NULL) {
    GError *err;
    gchar *debug_info;
    switch (GST_MESSAGE_TYPE (msg)) {
      case GST_MESSAGE_ERROR:
        gst_message_parse_error (msg, &err, &debug_info);
        g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
        g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
        g_clear_error (&err);
        g_free (debug_info);
        break;
      case GST_MESSAGE_EOS:
        g_print ("End-Of-Stream reached.\n");
        break;
      default:
        /* We should not reach here because we only asked for ERRORs and EOS */
        g_printerr ("Unexpected message received.\n");
        break;
    }
    gst_message_unref (msg);
  }

  /* Free resources */
  gst_object_unref (bus);
  gst_element_set_state (data.pipeline, GST_STATE_NULL);
  gst_object_unref (data.pipeline);
  return 0;
}

creating a pipeline to transmit voice

i have the following pipelines that one of them sends voice signals on udp port and the other receives them on the same port number on the receiver side
gst-launch-1.0 -v alsasrc ! audioconvert
! audio/x-raw,channels=2,depth=16,width=16,rate=44100 !
rtpL16pay ! udpsink
host=127.0.0.1 port=5000 //sender
and
gst-launch-1.0 udpsrc port=5000 ! "application/x-rtp,
media=(string)audio, clock-rate=(int)44100,
encoding-name=(string)L16, channels=(int)2,
payload=(int)96" ! rtpL16depay ! audioconvert
! alsasink //receiver
These pipelines work perfectly.
now i am trying to write a source code using Gstreamer SDK that does the same thing. I have come so far:
#include <gst/gst.h>
#include <string.h>
int main(int argc, char *argv[]) {
  GstElement *pipeline, *source, *audiosink, *rtppay, *rtpdepay, *filter, *filter1,
      *conv, *conv1, *udpsink, *udpsrc, *receive_resample;
  GstBus *bus;
  GstMessage *msg;
  GstCaps *filtercaps;
  GstStateChangeReturn ret;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Create the elements for both branches:
   * send:    alsasrc ! capsfilter ! audioconvert ! rtpL16pay ! udpsink
   * receive: udpsrc ! rtpL16depay ! audioconvert ! audioresample ! autoaudiosink */
  source = gst_element_factory_make ("alsasrc", "source");
  conv = gst_element_factory_make ("audioconvert", "conv");
  conv1 = gst_element_factory_make ("audioconvert", "conv1");
  filter = gst_element_factory_make ("capsfilter", "filter");
  rtppay = gst_element_factory_make ("rtpL16pay", "rtppay");
  rtpdepay = gst_element_factory_make ("rtpL16depay", "rtpdepay");
  udpsink = gst_element_factory_make ("udpsink", "udpsink");
  audiosink = gst_element_factory_make ("autoaudiosink", "audiosink");
  receive_resample = gst_element_factory_make ("audioresample", NULL);
  udpsrc = gst_element_factory_make ("udpsrc", NULL);
  /* BUG FIX: filter1 was created with the name "filter", the same name as
   * the capsfilter above.  Element names must be unique within a bin, so
   * adding the second one can fail and break pipeline construction. */
  filter1 = gst_element_factory_make ("capsfilter", "filter1");

  g_object_set (udpsrc, "port", 5000, NULL);
  g_object_set (G_OBJECT (udpsrc), "caps", gst_caps_from_string("application/x-rtp,media=audio,payload=96,clock-rate=44100,encoding-name=L16,channels=2"), NULL);

  /* Create the empty pipeline.
   * BUG FIX: the original check omitted the whole receive branch (udpsrc,
   * filter1, rtpdepay, conv1, receive_resample, audiosink). */
  pipeline = gst_pipeline_new ("test-pipeline");
  if (!pipeline || !source || !filter || !conv || !rtppay || !udpsink ||
      !udpsrc || !filter1 || !rtpdepay || !conv1 || !receive_resample || !audiosink) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }

  g_object_set (G_OBJECT (udpsink), "host", "127.0.0.1", NULL);
  g_object_set (G_OBJECT (udpsink), "port", 5000, NULL);

  /* Raw-audio caps for the capture side.
   * NOTE(review): "width"/"depth" are GStreamer 0.10 field names; with 1.0
   * raw audio is described as audio/x-raw,format=S16BE (or similar) — if
   * this link fails silently under 1.x, that is the first thing to check. */
  filtercaps = gst_caps_new_simple ("audio/x-raw",
      "channels", G_TYPE_INT, 2,
      "width", G_TYPE_INT, 16,
      "depth", G_TYPE_INT, 16,
      "rate", G_TYPE_INT, 44100,
      NULL);
  g_object_set (G_OBJECT (filter), "caps", filtercaps, NULL);
  gst_caps_unref (filtercaps);

  /* RTP caps; NOTE(review): filter1 is configured here but never added to
   * the pipeline — the receive branch relies on udpsrc's "caps" property
   * instead, so filter1 is currently redundant. */
  filtercaps = gst_caps_new_simple ("application/x-rtp",
      "media", G_TYPE_STRING, "audio",
      "clock-rate", G_TYPE_INT, 44100,
      "encoding-name", G_TYPE_STRING, "L16",
      "channels", G_TYPE_INT, 2,
      "payload", G_TYPE_INT, 96,
      NULL);
  g_object_set (G_OBJECT (filter1), "caps", filtercaps, NULL);
  gst_caps_unref (filtercaps);

  /* Build the sender branch */
  gst_bin_add_many (GST_BIN (pipeline), source, filter, conv, rtppay, udpsink, NULL);
  if (gst_element_link_many (source, filter, conv, rtppay, udpsink, NULL) != TRUE) {
    g_printerr ("Elements could not be linked.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  /* Build the receiver branch */
  gst_bin_add_many (GST_BIN (pipeline), udpsrc, rtpdepay, conv1, receive_resample, audiosink, NULL);
  if (gst_element_link_many (udpsrc, rtpdepay, conv1, receive_resample, audiosink, NULL) != TRUE) {
    g_printerr ("Elements could not be linked.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  /* Start playing */
  ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  /* Wait until error or EOS */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

  /* Parse message */
  if (msg != NULL) {
    GError *err;
    gchar *debug_info;
    switch (GST_MESSAGE_TYPE (msg)) {
      case GST_MESSAGE_ERROR:
        gst_message_parse_error (msg, &err, &debug_info);
        g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
        g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
        g_clear_error (&err);
        g_free (debug_info);
        break;
      case GST_MESSAGE_EOS:
        g_print ("End-Of-Stream reached.\n");
        break;
      default:
        /* We should not reach here because we only asked for ERRORs and EOS */
        g_printerr ("Unexpected message received.\n");
        break;
    }
    gst_message_unref (msg);
  }

  /* Free resources */
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}
But somehow I don't receive any voice on the receiver, and I don't get any errors of any kind. Any ideas why this is happening?

Resources