Elements could not be linked: which elements to link? (GStreamer, C)

I've been studying GStreamer, and I don't understand where I went wrong: every pipeline I tried to turn into code gives me 'Elements could not be linked'. I'm running this on Ubuntu 20.04 with an upstream kernel 5.13.0-35 (which I think is not relevant) and gcc 9. Here is the code:
#include <gst/gst.h>
#include <stdio.h>
/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
GstElement *pipeline;
GstElement *source;
GstCaps *caps;
GstElement *depay;
GstElement *parse;
GstElement *decode;
GstElement *convert;
GstElement *sink;
} CustomData;
int main (int argc, char *argv[])
{
CustomData data;
GstBus *bus;
GstMessage *msg;
GstStateChangeReturn ret;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements */
data.source = gst_element_factory_make ("udpsrc", "source");
data.caps = gst_caps_new_simple("application/x-rtp",
"media", G_TYPE_STRING, "video",
"clock-rate", G_TYPE_INT, 90000,
"encoding-name", G_TYPE_STRING, "H264",
"payload", G_TYPE_INT, 96,
NULL);
data.depay = gst_element_factory_make ("rtph264depay", "depay");
data.parse = gst_element_factory_make ("h264parse", "parse");
data.decode = gst_element_factory_make ("decodebin", "decode");
data.convert = gst_element_factory_make ("videoconvert", "convert");
data.sink = gst_element_factory_make ("autovideosink", "sink");
/* Create the empty pipeline */
data.pipeline = gst_pipeline_new ("test-pipeline");
if (!data.pipeline || !data.source || !data.depay || !data.parse || !data.decode || !data.convert || !data.sink) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.depay, data.parse, data.decode, data.convert, data.sink, NULL);
if (gst_element_link_many (data.source, data.depay, data.parse, data.decode, data.convert, data.sink, NULL) != TRUE) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (data.pipeline);
return -1;
}
/* Set the port and caps to play */
g_object_set (data.source, "port", 5000, NULL);
g_object_set (data.source, "caps", data.caps, NULL);
g_object_set (data.sink, "sync", FALSE, NULL);
/* Start playing */
ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (data.pipeline);
return -1;
}
/* Wait until error or EOS */
bus = gst_element_get_bus (data.pipeline);
msg =
gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n",
GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n",
debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
break;
case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n");
break;
default:
/* We should not reach here because we only asked for ERRORs and EOS */
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (msg);
}
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (data.pipeline, GST_STATE_NULL);
gst_object_unref (data.pipeline);
return 0;
}
The receiving pipeline is:
gst-launch-1.0 -v udpsrc port=5000 ! "application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264, payload=(int)96" ! rtph264depay ! h264parse ! decodebin ! videoconvert ! autovideosink sync=false
And the sender is:
gst-launch-1.0 -v filesrc location=test.mp4 ! qtdemux ! h264parse ! avdec_h264 ! x264enc ! rtph264pay ! udpsink host=$HOST port=5000
Thanks in advance.

You may try using gst_parse_launch(), which will negotiate caps as gst-launch does:
const gchar *pipeline_str = "udpsrc port=5000 ! application/x-rtp,encoding-name=H264 ! rtph264depay ! h264parse ! decodebin ! videoconvert ! autovideosink sync=false";
GstElement *pipeline = gst_parse_launch (pipeline_str, NULL);
if (!pipeline) {
g_error ("Failed to create pipeline\n");
exit(-1);
}
...
Otherwise, you may have to add capsfilters.
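For context: in the code above, the static link most likely fails at decodebin ! videoconvert, because decodebin creates its source pad dynamically and so cannot be linked with gst_element_link_many() at construction time. gst_parse_launch() handles this for you; alternatively, replace decodebin with a fixed decoder such as avdec_h264, or connect to decodebin's pad-added signal. If you do build the pipeline by hand, an explicit capsfilter would look like this (a minimal sketch; the element and variable names are illustrative):
GstElement *capsfilter = gst_element_factory_make ("capsfilter", "rtpcaps");
GstCaps *rtpcaps = gst_caps_from_string ("application/x-rtp,media=video,clock-rate=90000,encoding-name=H264,payload=96");
g_object_set (capsfilter, "caps", rtpcaps, NULL);
gst_caps_unref (rtpcaps);
/* Add it to the bin and link it between the source and the depayloader:
   udpsrc ! capsfilter ! rtph264depay ! ... */
gst_bin_add (GST_BIN (data.pipeline), capsfilter);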

Related

GStreamer x264enc invalid buffer size (C)

I've been trying to use this GStreamer C code (my system is running Ubuntu 20.04 with GStreamer 1.16.2 and gcc 9.4.0):
#include <gst/gst.h>
#include <glib.h>
#include <stdio.h>
// gst-launch-1.0 videotestsrc pattern=ball ! 'video/x-raw, format=(string)I420, width=(int)1920, height=(int)1080, framerate=(fraction)30/1' ! \
//   queue ! nvvideoconvert ! nvv4l2h264enc bitrate=1000000 ! rtph264pay ! udpsink host=192.168.0.1 port=5000
int
main (int argc, char *argv[])
{
GstElement *pipeline, *source, *filter, *queue, *converter, *encoder, *payer, *sink;
GstBus *bus;
GstMessage *msg;
GstCaps *filtercaps;
GstStateChangeReturn ret;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements */
source = gst_element_factory_make ("filesrc", "source");
filter = gst_element_factory_make ("capsfilter","filter");
queue = gst_element_factory_make ("queue","queue");
converter = gst_element_factory_make ("videoconvert","converter");
encoder = gst_element_factory_make ("x264enc","encoder");
payer = gst_element_factory_make ("rtph264pay","payer");
sink = gst_element_factory_make ("udpsink", "sink");
/* Create the empty pipeline */
pipeline = gst_pipeline_new ("maxim-pipeline");
if (!pipeline || !source || !filter || !queue || !converter || !encoder || !payer || !sink) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Build the pipeline */
gst_bin_add_many (GST_BIN (pipeline), source, filter, queue, converter, encoder, payer, sink, NULL);
if (!gst_element_link_many (source,filter,queue,NULL)){
g_printerr ("Source->filter->queue problem\n");
gst_object_unref (pipeline);
return -1;
}
if (!gst_element_link_many (queue,converter,encoder,NULL)){
g_printerr ("Queue->converter->encoder problem\n");
gst_object_unref (pipeline);
return -1;
}
if (!gst_element_link_many(encoder,payer,sink,NULL)){
g_printerr ("Encoder->payer->sink problem\n");
gst_object_unref (pipeline);
return -1;
}
/* Modify the properties */
g_object_set (source, "location", "/home/thmsd/Videos/test.mkv", NULL);
g_object_set (encoder, "bitrate", 2000000, NULL);
g_object_set (sink, "host","192.168.0.1", NULL);
g_object_set (sink, "port",5000, NULL);
g_object_set (sink, "sync", "FALSE", NULL);
filtercaps = gst_caps_new_simple ("video/x-raw",
"format",G_TYPE_STRING,"I420",
"width", G_TYPE_INT, 1920,
"height", G_TYPE_INT, 1080,
"framerate",GST_TYPE_FRACTION,30,1,
NULL);
g_object_set (filter, "caps", filtercaps, NULL);
gst_caps_unref (filtercaps);
/* Start playing */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (pipeline);
return -1;
}
/* Wait until error or EOS */
bus = gst_element_get_bus (pipeline);
msg =
gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n",
GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n",
debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
break;
case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n");
break;
default:
/* We should not reach here because we only asked for ERRORs and EOS */
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (msg);
}
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
return 0;
}
But the following error happens, which actually refers to x264enc. By the way, this code has been edited; at first it was Jetson-compatible and used nvv4l2h264enc:
The test video that I'm trying to stream has the following properties:
First, note that boolean properties are not set with capitalized strings: in C, pass the gboolean values TRUE/FALSE (or just 0 and 1) to g_object_set(), not the string "FALSE".
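For instance, the sync line from the code above would be written as:
g_object_set (sink, "sync", FALSE, NULL);  /* a gboolean value, not the string "FALSE" */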
x264enc may not be that fast on Jetson. You may try the following:
#include <gst/gst.h>
int main (gint argc, gchar * argv[])
{
gst_init (&argc, &argv);
GMainLoop *loop = g_main_loop_new (NULL, FALSE);
GError *error = NULL;
char* gst_pipeline_str = "filesrc location=/home/nvidia/Videos/bbb_sunflower_1080p_60fps_normal.mkv ! matroskademux ! parsebin ! nvv4l2decoder ! nvv4l2h264enc bitrate=20000000 insert-sps-pps=1 insert-vui=1 idrinterval=15 ! h264parse ! rtph264pay ! udpsink host=127.0.0.1 port=5004";
/* x264enc may be much slower :*/
/*
char* gst_pipeline_str = "filesrc location=/home/nvidia/Videos/bbb_sunflower_1080p_60fps_normal.mkv ! matroskademux ! parsebin ! nvv4l2decoder ! nvvidconv ! video/x-raw ! x264enc bitrate=20000 tune=zerolatency insert-vui=1 ! h264parse ! rtph264pay ! udpsink host=127.0.0.1 port=5004";
*/
/* Create the pipeline */
GstElement *pipeline = gst_parse_launch (gst_pipeline_str, &error);
if (error || !pipeline) {
g_error ("Failed to create pipeline\n");
exit(-1);
}
/* This will output changes in terminal, you may remove it later to make it quiet. */
g_signal_connect(pipeline, "deep-notify", G_CALLBACK(gst_object_default_deep_notify), NULL);
/* Ok, successfully created the pipeline, now start it */
gst_element_set_state (pipeline, GST_STATE_READY);
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* wait until it's up and running or failed */
if (gst_element_get_state (pipeline, NULL, NULL, -1) == GST_STATE_CHANGE_FAILURE) {
g_error ("Failed to go into PLAYING state");
exit(-2);
}
/* You may have to further manage bus for EOS... */
g_print ("Running ...\n");
g_main_loop_run (loop);
return 0;
}
This is what I've successfully tested on Jetson (AGX Xavier running L4T R32.6.1).
You would adapt the file source and receiver address/port to your setup, save it as test_transcode_MKV_to_RTPH264.c, and then build with:
gcc -Wall -o test_transcode_MKV_to_RTPH264 test_transcode_MKV_to_RTPH264.c `pkg-config --cflags --libs gstreamer-1.0 gobject-2.0 glib-2.0`
and test streaming:
./test_transcode_MKV_to_RTPH264
Then, if the receiver has GStreamer installed, you should be able to display the stream with something like:
gst-launch-1.0 udpsrc port=5004 ! application/x-rtp,encoding-name=H264 ! rtpjitterbuffer latency=300 ! rtph264depay ! decodebin ! autovideosink
For receiving RTP/H264 with FFmpeg or VLC, you may have to create an SDP file.
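A minimal SDP file for the stream above might look like this (a sketch; adjust the connection address and port to your setup):
v=0
o=- 0 0 IN IP4 127.0.0.1
s=RTP H264 test stream
c=IN IP4 127.0.0.1
t=0 0
m=video 5004 RTP/AVP 96
a=rtpmap:96 H264/90000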
EDIT: for the non-NVIDIA case, you may try:
filesrc location=test_h265.mkv ! matroskademux ! h265parse ! avdec_h265 ! videoconvert ! x264enc bitrate=20000 tune=zerolatency insert-vui=1 key-int-max=30 ! h264parse ! rtph264pay ! udpsink host=127.0.0.1 port=5004

rtspsrc internal data stream error in a GStreamer C application

gst-launch-1.0 rtspsrc location=rtsp://192.168.1.43:554/stream0 latency=0 name=src src. ! rtph264depay ! queue ! h264parse ! vpudec ! videoconvert ! videoscale ! waylandsink window-width=352 window-height=288
I'm trying to write the pipeline as a C application. The command above runs successfully, but the C application gives this error: Error received from element udpsrc1: Internal data stream error.
Here's my c code:
#include <gst/gst.h>
#include <string.h>
#include <stdlib.h>
#include <stdio.h>
/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _Data {
GstElement *pipeline;
GstElement *source;
GstElement *videoQueue;
GstElement *videoParser;
GstElement *videoDepayloader;
GstElement *videoDecoder;
GstElement *videoSink;
GstElement *videoConvert;
GstElement *videoScale;
} Data;
int main(int argc, char *argv[]) {
Data data;
GstBus *bus;
GstMessage *msg;
GstPad *pad;
gboolean terminate = FALSE;
GstStateChangeReturn ret;
/* Initialize GStreamer */
gst_init (NULL,NULL);
data.source = gst_element_factory_make ("rtspsrc", "source");
data.videoQueue = gst_element_factory_make ("queue", "videoQueue");
data.videoDepayloader = gst_element_factory_make ("rtph264depay", "videoDepayloader");
data.videoDecoder = gst_element_factory_make ("vpudec", "videoDecoder");
data.videoSink = gst_element_factory_make ("waylandsink", "videoSink");
data.videoParser = gst_element_factory_make("h264parse", "videoParser");
data.videoConvert = gst_element_factory_make("videoconvert", "video-convert");
data.videoScale = gst_element_factory_make("videoscale", "videoScale");
/* Create the empty pipeline */
data.pipeline = gst_pipeline_new ("rtsp-pipeline");
if (!data.source) {
g_printerr ("The source element could not be created.\n");
return -1;
}
if (!data.videoQueue) {
g_printerr ("The videoQueue element could not be created.\n");
return -1;
}
if (!data.videoDepayloader) {
g_printerr ("The videoDepayloader element could not be created.\n");
return -1;
}
if (!data.videoDecoder) {
g_printerr ("The videoDecoder element could not be created.\n");
return -1;
}
if (!data.videoSink) {
g_printerr ("The videoSink element could not be created.\n");
return -1;
}
if (!data.videoParser) {
g_printerr ("The videoParser element could not be created.\n");
return -1;
}
if (!data.videoConvert) {
g_printerr ("The videoConvert element could not be created.\n");
return -1;
}
if (!data.videoScale) {
g_printerr ("The videoScale element could not be created.\n");
return -1;
}
/* Configure elements */
g_object_set(data.source, "location", "rtsp://192.168.1.43/h264cif", NULL);
g_object_set(data.source, "latency", 0, NULL);
gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.videoDepayloader, data.videoQueue, data.videoParser, data.videoDecoder, data.videoConvert, data.videoScale, data.videoSink, NULL);
//GST_DEBUG=4 gst-launch-1.0 rtspsrc location=rtsp://192.168.1.43:554/stream0 latency=0 name=src src. ! rtph264depay ! queue ! h264parse ! vpudec ! videoconvert ! videoscale ! waylandsink window-width=352 window-height=288
if (!(gst_element_link_many(data.videoDepayloader, data.videoQueue,
data.videoParser, data.videoDecoder,
data.videoScale, data.videoConvert, data.videoSink, NULL)))
{
g_printerr("Error linking fields... \n");
exit (-1);
}
/* Start playing */
ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_print ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (data.pipeline);
exit(1);
}
/* Listen to the bus */
bus = gst_element_get_bus (data.pipeline);
do {
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
terminate = TRUE;
break;
case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n");
terminate = TRUE;
break;
case GST_MESSAGE_STATE_CHANGED:
/* We are only interested in state-changed messages from the pipeline */
if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data.pipeline)) {
GstState old_state, new_state, pending_state;
gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
g_print ("Pipeline state changed from %s to %s:\n",
gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));
}
break;
default:
/* We should not reach here */
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (msg);
}
} while (!terminate);
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (data.pipeline, GST_STATE_NULL);
gst_object_unref (data.pipeline);
return 0;
}
It seems like you did not link your source element:
gst_element_link_many (data.source, data.videoDepayloader, /* ... */, NULL)
Also, be sure to link the elements in the same order as in your command-line pipeline.
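Note that rtspsrc creates its source pads dynamically (they only appear once the RTSP streams have been negotiated), so a static link made while building the pipeline can fail. A common pattern is to connect to rtspsrc's pad-added signal and link from the callback; a minimal sketch (the handler name is illustrative):
static void
on_pad_added (GstElement *src, GstPad *new_pad, gpointer user_data)
{
  GstElement *depay = GST_ELEMENT (user_data);
  GstPad *sink_pad = gst_element_get_static_pad (depay, "sink");
  /* Link the new rtspsrc pad to the depayloader's sink pad, once */
  if (!gst_pad_is_linked (sink_pad))
    gst_pad_link (new_pad, sink_pad);
  gst_object_unref (sink_pad);
}
/* In main(), after adding the elements and linking the rest of the chain: */
g_signal_connect (data.source, "pad-added", G_CALLBACK (on_pad_added), data.videoDepayloader);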

How to convert the following GStreamer command line into C

gst-launch-1.0 --gst-debug-level=3 dvbsrc modulation='QAM 256' frequency=147000000 ! decodebin name=demux \
demux. ! queue ! audioresample ! audioconvert ! voaacenc ! mux. \
mpegtsmux name=mux ! udpsink host=127.0.0.0 port=22 \
demux. ! queue ! videoconvert ! videoscale ! x264enc bitrate=1240 tune=zerolatency ! video/x-h264,stream-format=byte-stream,profile=high,width=540,height=380,key-int-max=15 ! mux.
I want to implement the same thing in C. This is what I am trying:
#include <gst/gst.h>
#include <glib.h>
static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
GMainLoop *loop = (GMainLoop *) data;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_EOS:
g_print ("End of stream\n");
g_main_loop_quit (loop);
break;
case GST_MESSAGE_ERROR: {
gchar *debug;
GError *error;
gst_message_parse_error (msg, &error, &debug);
g_free (debug);
g_printerr ("Error: %s\n", error->message);
g_error_free (error);
g_main_loop_quit (loop);
break;
}
default:
break;
}
return TRUE;
}
int main (gint argc, gchar *argv[])
{
GMainLoop *loop;
GstElement *pipeline, *source, *demuxer,*dec,*vdqueue,*adqueue, *conv, *sink;
GstBus *bus;
/* init GStreamer */
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
/* setup */
pipeline = gst_pipeline_new ("pipeline");
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
source = gst_element_factory_make ("filesrc", "filesource");
g_object_set (G_OBJECT (source), "location", argv[1], NULL);
g_printerr ("source is: %s \n",argv[1]);
//demuxer = gst_element_factory_make ("avidemux", "avi-demuxer");
demuxer = gst_element_factory_make ("oggdemux", "ogg-demuxer");
vdqueue = gst_element_factory_make ("queue", "video-queue");
adqueue = gst_element_factory_make ("queue", "audio-queue");
dec = gst_element_factory_make ("vorbisdec", "vorbis-decoder");
conv = gst_element_factory_make ("audioconvert", "converter");
sink = gst_element_factory_make ("autoaudiosink", "audio-output");
/* g_object_set (pipeline->mux,"max-delay", (guint64)0,NULL);
g_object_set (pipeline->mux,"max-page-delay",(guint64)0,NULL);
*/
g_object_set (G_OBJECT (source), "caps",
gst_caps_new_simple ("video/x-raw",
"format", G_TYPE_STRING, "RGB16",
"width", G_TYPE_INT, 640,
"height", G_TYPE_INT, 480,
"framerate", GST_TYPE_FRACTION, 0, 1,
NULL), NULL);
gst_bin_add_many (GST_BIN (pipeline),source,demuxer,vdqueue,adqueue,dec,conv,sink, NULL);
gst_element_link (source, demuxer);
gst_element_link (demuxer, vdqueue);
gst_element_link (vdqueue, adqueue);
gst_element_link (adqueue, dec);
gst_element_link (dec, conv);
gst_element_link (conv, sink);
/* run */
// g_printerr ("pipeline is: %s \n",pipeline);
gst_element_set_state (pipeline, GST_STATE_PLAYING);
g_main_loop_run (loop);
/* cleanup */
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (GST_OBJECT (pipeline));
return 0;
}

Creating a pipeline to transmit voice

I have the following pipelines; one of them sends voice signals on a UDP port and the other receives them on the same port number on the receiver side:
gst-launch-1.0 -v alsasrc ! audioconvert ! audio/x-raw,channels=2,depth=16,width=16,rate=44100 ! rtpL16pay ! udpsink host=127.0.0.1 port=5000   # sender
and
gst-launch-1.0 udpsrc port=5000 ! "application/x-rtp, media=(string)audio, clock-rate=(int)44100, encoding-name=(string)L16, channels=(int)2, payload=(int)96" ! rtpL16depay ! audioconvert ! alsasink   # receiver
These pipelines work perfectly. Now I am trying to write source code using the GStreamer API that does the same thing. This is how far I have come:
#include <gst/gst.h>
#include <string.h>
int main(int argc, char *argv[]) {
GstElement *pipeline, *source, *audiosink,*rtppay,*rtpdepay,*filter,*filter1,*conv,*conv1,*udpsink,*udpsrc,*receive_resample;
GstBus *bus;
GstMessage *msg;
GstCaps *filtercaps;
GstStateChangeReturn ret;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements */
source = gst_element_factory_make ("alsasrc", "source");
conv= gst_element_factory_make ("audioconvert", "conv");
conv1= gst_element_factory_make ("audioconvert", "conv1");
filter=gst_element_factory_make("capsfilter","filter");
rtppay=gst_element_factory_make("rtpL16pay","rtppay");
rtpdepay=gst_element_factory_make("rtpL16depay","rtpdepay");
udpsink=gst_element_factory_make("udpsink","udpsink");
audiosink = gst_element_factory_make ("autoaudiosink", "audiosink");
receive_resample = gst_element_factory_make("audioresample", NULL);
udpsrc=gst_element_factory_make("udpsrc",NULL);
filter1=gst_element_factory_make("capsfilter","filter");
g_object_set(udpsrc,"port",5000,NULL);
g_object_set (G_OBJECT (udpsrc), "caps", gst_caps_from_string("application/x-rtp,media=audio,payload=96,clock-rate=44100,encoding-name=L16,channels=2"), NULL);
/* Create the empty pipeline */
pipeline = gst_pipeline_new ("test-pipeline");
if (!pipeline || !source || !filter || !conv || !rtppay || !udpsink ) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
g_object_set(G_OBJECT(udpsink),"host","127.0.0.1",NULL);
g_object_set(G_OBJECT(udpsink),"port",5000,NULL);
filtercaps = gst_caps_new_simple ("audio/x-raw",
"channels", G_TYPE_INT, 2,
"width", G_TYPE_INT, 16,
"depth", G_TYPE_INT, 16,
"rate", G_TYPE_INT, 44100,
NULL);
g_object_set (G_OBJECT (filter), "caps", filtercaps, NULL);
gst_caps_unref (filtercaps);
filtercaps = gst_caps_new_simple ("application/x-rtp",
"media",G_TYPE_STRING,"audio",
"clock-rate",G_TYPE_INT,44100,
"encoding-name",G_TYPE_STRING,"L16",
"channels", G_TYPE_INT, 2,
"payload",G_TYPE_INT,96,
NULL);
g_object_set (G_OBJECT (filter1), "caps", filtercaps, NULL);
gst_caps_unref (filtercaps);
/* Build the pipeline */
gst_bin_add_many (GST_BIN (pipeline), source,filter,conv,rtppay,udpsink, NULL);
if (gst_element_link_many (source,filter,conv,rtppay,udpsink, NULL) != TRUE) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (pipeline);
return -1;
}
gst_bin_add_many (GST_BIN (pipeline),udpsrc,rtpdepay,conv1,receive_resample,audiosink,NULL);
if (gst_element_link_many (udpsrc,rtpdepay,conv1,receive_resample,audiosink,NULL) != TRUE) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (pipeline);
return -1;
}
/* Modify the source's properties */
// g_object_set (source, "pattern", 0, NULL);
/* Start playing */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (pipeline);
return -1;
}
/* Wait until error or EOS */
bus = gst_element_get_bus (pipeline);
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
break;
case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n");
break;
default:
/* We should not reach here because we only asked for ERRORs and EOS */
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (msg);
}
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
return 0;
}
But somehow I don't receive any voice on the receiver, and I don't get errors of any kind. Any ideas why this is happening?

How to program videomixer using the GStreamer C API

I am trying to simulate the following gstreamer pipeline using the C API:
gst-launch -e videomixer name=mix ! ffmpegcolorspace ! xvimagesink \
videotestsrc pattern=1 ! video/x-raw-yuv,width=100,height=100 ! videobox border-alpha=0 top=0 left=0 ! mix. \
videotestsrc pattern=0 ! video/x-raw-yuv,width=100,height=100 ! videobox border-alpha=0 top=0 left=-100 ! mix.
So far I have:
#include <gst/gst.h>
#include <glib.h>
static gboolean
bus_call (GstBus *bus,
GstMessage *msg,
gpointer data)
{
GMainLoop *loop = (GMainLoop *) data;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_EOS:
g_print ("End of stream\n");
g_main_loop_quit (loop);
break;
case GST_MESSAGE_ERROR: {
gchar *debug;
GError *error;
gst_message_parse_error (msg, &error, &debug);
g_free (debug);
g_printerr ("Error: %s\n", error->message);
g_error_free (error);
g_main_loop_quit (loop);
break;
}
default:
break;
}
return TRUE;
}
int
main (int argc,
char *argv[])
{
GMainLoop *loop;
GstElement *pipeline;
GstElement *source1,*source2;
GstElement *scale,*filter;
GstElement *videobox1,*videobox2; //just one.
GstElement *mixer,*clrspace,*sink;
GstCaps *filtercaps;
GstBus *bus;
/* Initialisation */
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
/* Check input arguments */
/*if (argc != 2) {
g_printerr ("Usage: %s <Ogg/Vorbis filename>\n", argv[0]);
return -1;
}*/
//gst-launch videotestsrc pattern=snow ! ximagesink
/* Create gstreamer elements */
pipeline = gst_pipeline_new ("player");
source1 = gst_element_factory_make ("videotestsrc", "source1");
source2 = gst_element_factory_make ("videotestsrc", "source2");
// source2 = gst_element_factory_make ("uridecodebin", "file-source2");
scale = gst_element_factory_make ("videoscale", "scale");
filter = gst_element_factory_make("capsfilter","filter");
videobox1 = gst_element_factory_make ("videobox", "videobox1");
videobox2 = gst_element_factory_make ("videobox", "videobox2");
mixer = gst_element_factory_make ("videomixer", "mixer");
clrspace = gst_element_factory_make ("ffmpegcolorspace", "clrspace");
// demuxer = gst_element_factory_make ("oggdemux", "ogg-demuxer");
// decoder = gst_element_factory_make ("vorbisdec", "vorbis-decoder");
// conv = gst_element_factory_make ("audioconvert", "converter");
sink = gst_element_factory_make ("xvimagesink", "sink");
/*if (!pipeline || !source || !demuxer || !decoder || !conv || !sink) {
g_printerr ("One element could not be created. Exiting.\n");
return -1;
}*/
if (!pipeline || !source1 || !source2 || !sink) {
g_printerr ("One element could not be created. Exiting.\n");
return -1;
}
filtercaps = gst_caps_new_simple ("video/x-raw-yuv",
"width", G_TYPE_INT, 200,
"height", G_TYPE_INT, 100,
NULL);
g_object_set (G_OBJECT (filter), "caps", filtercaps, NULL);
//gst_caps_unref (filtercaps);
g_object_set(videobox1,"border-alpha",0,"top",0,"left",0,NULL);
g_object_set(videobox2,"border-alpha",0,"top",0,"left",-200,NULL);
/* Set up the pipeline */
/* we set the input filename to the source element */
g_object_set (G_OBJECT (source1), "pattern", 0, NULL);
g_object_set (G_OBJECT (source2), "pattern", 1, NULL);
/* we add a message handler */
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
/* we add all elements into the pipeline */
gst_bin_add_many (GST_BIN (pipeline),
source1,filter,videobox1,mixer,clrspace, sink, source2,videobox2, NULL);
/* we link the elements together */
//gst_element_link_many (source1, scale, filter, videobox1, mixer, clrspace, sink);
//gst_element_link_many (source2, scale, filter, videobox2, mixer, clrspace, sink);
gst_element_link_many (source1, filter, videobox1, mixer, clrspace, sink);
gst_element_link_many (source2, filter, videobox2, mixer, clrspace, sink);
/* Set the pipeline to "playing" state*/
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* Iterate */
g_print ("Running...\n");
g_main_loop_run (loop);
/* Out of the main loop, clean up nicely */
g_print ("Returned, stopping playback\n");
gst_element_set_state (pipeline, GST_STATE_NULL);
g_print ("Deleting pipeline\n");
gst_object_unref (GST_OBJECT (pipeline));
return 0;
}
I have also set debugging on: export GST_DEBUG=3
When I run my program I get the following error:
Running...
0:00:00.178663884 4797 0x8937020 WARN basesrc gstbasesrc.c:2582:gst_base_src_loop:<source2> error: Internal data flow error.
0:00:00.178766444 4797 0x8937020 WARN basesrc gstbasesrc.c:2582:gst_base_src_loop:<source2> error: streaming task paused, reason not-linked (-1)
Error: Internal data flow error.
Returned, stopping playback
0:00:00.202571746 4797 0x893ae00 WARN basetransform gstbasetransform.c:1627:gst_base_transform_prepare_output_buffer:<clrspace> pad-alloc failed: wrong-state
0:00:00.202645907 4797 0x893ae00 WARN basetransform gstbasetransform.c:2335:gst_base_transform_handle_buffer:<clrspace> could not get buffer from pool: wrong-state
Deleting pipeline
Why is it complaining about source2 not-linked?
A little late, but may be helpful:
If you look at the documentation for the videomixer element, you'll see that videomixer's sink pads are request pads: you need to request (create) these pads before linking them.
/* Manually link the mixer, which has "Request" pads */
GstPadTemplate *mixer_sink_pad_template = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (mixer), "sink_%u");
GstPad *mixer_sink_pad = gst_element_request_pad (mixer, mixer_sink_pad_template, NULL, NULL);
/* Link the upstream branch (here videobox1's src pad) into the requested mixer pad */
GstPad *videobox_src_pad = gst_element_get_static_pad (videobox1, "src");
gst_pad_link (videobox_src_pad, mixer_sink_pad);
Similarly, request pads are created for as many streams as you want:
gst_element_link_many (source1, filter, videobox1, mixer, clrspace, sink, NULL);
gst_element_link_many (source2, filter, videobox2, mixer, NULL);
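(Note that a single capsfilter instance can only be linked into one chain, so each source branch needs its own capsfilter element; and if you link a branch into the mixer manually as above, leave mixer out of that branch's gst_element_link_many() call.)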
Please also learn about the xpos, ypos and zorder properties on the videomixer pads; you can spare the videobox elements this way and gain performance.
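For example, positioning a stream via its mixer pad instead of a videobox (a minimal sketch, reusing the mixer_sink_pad requested above):
g_object_set (mixer_sink_pad, "xpos", 100, "ypos", 0, NULL);  /* place this stream 100 px from the left edge of the mix */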
