Background: I have an app that takes a pipeline from a USB camera and saves the video output to a file. Now I want to do the same, but also work with the frames inside the application. So during testing I have the main pipeline running from the camera to a filesink and an appsink (via tee), and a secondary pipeline from an appsrc (at this point fed directly from the aforementioned appsink) to a different filesink. So I have:
Pipeline one: camera ---> tee ---> filesink1
                           `----> appsink
Pipeline two: (appsink ->) appsrc ---> filesink2
Goal: drop the tee from the first pipeline, reduce it to camera ---> appsink, and copy (and later process) each sample over to the second pipeline:
Pipeline one: camera ---> appsink
Pipeline two: (appsink -> some work ->) appsrc ---> filesink
Problem: Both test videos are created and both have the same size, but the one written by the appsrc ---> filesink pipeline claims to be 0 s long, no matter how long the video actually is. Can anyone guess why?
Code:
GstElement *pipeline_main; //main pipeline FROM the camera
GstElement *pipeline_saveVideo;
GstElement *tee; //splitter
GstCaps *caps; //caps currently set to the camera
GstElement *videoApssrc; //source for the pipeline to SAVE the video;
bool record;
static GstFlowReturn new_sample_jpeg(GstElement * elt)
{
GstSample *sample;
GstFlowReturn ret = GST_FLOW_OK;
// get the sample from appsink (transfer full: we own it and must unref it when done)
sample = gst_app_sink_pull_sample (GST_APP_SINK (elt));
// if recording, forward the sample to the recording pipeline
// (gst_app_src_push_sample does not take ownership of the sample)
if (record) gst_app_src_push_sample(GST_APP_SRC(videoApssrc), sample);
...
return ret;
}
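For the copy step mentioned in the goal, a minimal sketch of duplicating the pulled sample's buffer and re-timestamping it before pushing it into the second pipeline. It assumes a fixed 30 fps stream, and frame_count is a hypothetical counter introduced here only for illustration. Re-timestamping matters because the second pipeline has its own running time, and a muxer fed buffers with foreign or missing timestamps can easily write a file whose reported duration is wrong:
static guint64 frame_count = 0; // hypothetical counter, illustration only
static void push_copied_frame (GstSample *sample)
{
    GstBuffer *buf = gst_buffer_copy (gst_sample_get_buffer (sample));
    GST_BUFFER_PTS (buf) = gst_util_uint64_scale (frame_count, GST_SECOND, 30); // assumes 30 fps
    GST_BUFFER_DTS (buf) = GST_BUFFER_PTS (buf);
    GST_BUFFER_DURATION (buf) = gst_util_uint64_scale (1, GST_SECOND, 30);
    frame_count++;
    gst_app_src_push_buffer (GST_APP_SRC (videoApssrc), buf); // takes ownership of buf
}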
bool createPipelineVideo(std::string path){
GstStateChangeReturn ret;
GstElement *muxer, *sink, *queue;
videoApssrc = gst_element_factory_make ("appsrc", "saveVideoSource");
muxer = gst_element_factory_make ("avimux", "avimux"); // note: created but never added to the pipeline or linked below
queue = gst_element_factory_make("queue", "rcr_queue");
sink = gst_element_factory_make ("filesink", "sink");
g_object_set (sink, "location", path.c_str(), NULL);
pipeline_saveVideo = gst_pipeline_new ("pipeline_vid");
if (!pipeline_saveVideo || !videoApssrc || !muxer || !queue || !sink) {
g_printerr ("Not all elements could be created.\n");
return false;
}
gst_app_src_set_caps(GST_APP_SRC(videoApssrc), caps);
gst_app_src_set_duration(GST_APP_SRC(videoApssrc), GST_TIME_AS_MSECONDS(80)); // note: GST_TIME_AS_MSECONDS converts ns to ms, so this passes 0; 80 * GST_MSECOND would express 80 ms
gst_app_src_set_stream_type(GST_APP_SRC(videoApssrc), GST_APP_STREAM_TYPE_STREAM);
gst_app_src_set_latency(GST_APP_SRC(videoApssrc), -1, 0);
gst_bin_add_many (GST_BIN (pipeline_saveVideo), videoApssrc,queue, sink, NULL);
if (gst_element_link_many(videoApssrc, queue, sink, NULL) != TRUE) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (pipeline_saveVideo);
return false;
}
ret = gst_element_set_state (pipeline_saveVideo, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (pipeline_saveVideo);
return false;
}
return true;
}
void startCapturing(){
if (!gst_is_initialized()) {
qWarning()<<"initializing GST";
setenv("GST_DEBUG", ("*:" + std::to_string(3)).c_str(), 1);
gst_init(nullptr, nullptr);
}
GstStateChangeReturn ret;
GstElement *source, *muxer, *sink, *queue_rcr, *queue_app, *appsink;
source = gst_element_factory_make ("v4l2src", "source");
g_object_set (source, "device", "/dev/video1", NULL);
muxer = gst_element_factory_make ("avimux", "avimux");
tee = gst_element_factory_make("tee", "tee");
sink = gst_element_factory_make ("filesink", "sink");
queue_rcr = gst_element_factory_make ("queue", "record_queue");
queue_app = gst_element_factory_make ("queue", "app_queue");
appsink = gst_element_factory_make("appsink", "appsink");
g_object_set (sink, "location", "/mnt/test1.avi", NULL);
pipeline_main = gst_pipeline_new ("pipeline_src");
if (!pipeline_main || !source || !muxer || !tee || !sink || !queue_rcr || !queue_app || !appsink) {
g_printerr ("Not all elements could be created.\n");
return;
}
caps = gst_caps_new_simple ("image/jpeg",
    "width", G_TYPE_INT, 1920,
    "height", G_TYPE_INT, 1080,
    // note: "io-mode" is a v4l2src element property, not a caps field
    "io-mode", G_TYPE_INT, 4,
    "framerate", GST_TYPE_FRACTION, 30, 1,
    "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
    "interlace-mode", G_TYPE_STRING, "progressive",
    NULL);
gst_bin_add_many (GST_BIN (pipeline_main), source, muxer,tee, sink,queue_rcr, appsink, queue_app, NULL);
if (gst_element_link_filtered(source, tee, caps) != TRUE) {
gstFail("Elements could not be linked or caps set.\n");
return;
}
if (gst_element_link_many(tee, queue_rcr, muxer, sink, NULL) != TRUE) {
gstFail("Elements could not be linked-recording line\n");
return;
}
if (gst_element_link_many(tee, queue_app, appsink, NULL) != TRUE) {
gstFail("Elements could not be linked-recording line\n");
return;
}
gst_app_sink_set_emit_signals(GST_APP_SINK(appsink), true);
g_signal_connect (appsink, "new-sample", G_CALLBACK (new_sample_jpeg));
ret = gst_element_set_state (pipeline_main, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
gstFail("Unable to set the pipeline to the playing state.\n");
return;
}
// Start playing
createPipelineVideo("/mnt/test2.avi");
record = true;
return;
}
void endVideo(){
record = false;
// stop the main pipeline
gst_element_set_state (pipeline_main, GST_STATE_PAUSED);
gst_element_set_state (pipeline_main, GST_STATE_NULL);
sleep(1);
gst_object_unref (pipeline_main);
// signal end-of-stream on the recording pipeline through the appsrc
// (gst_app_src_end_of_stream makes the EOS flow through the elements;
// merely posting an EOS message on the bus would only notify the application)
GstFlowReturn status = gst_app_src_end_of_stream(GST_APP_SRC(videoApssrc));
// end the pipeline
usleep(500000);
gst_element_set_state (pipeline_saveVideo, GST_STATE_PAUSED);
gst_element_set_state (pipeline_saveVideo, GST_STATE_NULL);
gst_object_unref (pipeline_saveVideo);
}
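The sleeps above are only a stopgap. A sketch of draining the save pipeline deterministically instead: inject EOS through the appsrc and wait for it to come back on the bus before tearing the pipeline down. This also gives avimux the chance to finalize the file's headers and index, which is the usual cause of an AVI that reports a 0 s duration:
gst_app_src_end_of_stream (GST_APP_SRC (videoApssrc));
GstBus *bus = gst_element_get_bus (pipeline_saveVideo);
GstMessage *m = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
        (GstMessageType) (GST_MESSAGE_EOS | GST_MESSAGE_ERROR));
if (m != NULL)
    gst_message_unref (m); // EOS has reached the sink by now
gst_object_unref (bus);
gst_element_set_state (pipeline_saveVideo, GST_STATE_NULL);
gst_object_unref (pipeline_saveVideo);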
The general goal is that I want to play an audio track on my RPi with aplay ("aplay example.mp3") and have the output audio looped back into a GStreamer program, which then does a spectrum analysis.
I already got the spectrum analysis working on a static file, with this code as the source:
data.source = gst_element_factory_make ("uridecodebin", "source");
g_object_set (data.source, "uri", "file:///home/pi/example.mp3", NULL);
Of course I want to use the overall output of my RPi as the source for the program, but I don't know how. I know I need to loop the audio back from the output to the input, and I found that snd-aloop looks promising. The problem is that I still don't know how to use it. I tried:
data.source = gst_element_factory_make ("alsasrc", "source");
g_object_set(data.source, "device", XXX ,NULL);
where XXX is one of:
"alsa_output.platform-snd_aloop.0.analog-stereo.monitor"
"hw:1"
"hw:0"
Error -> Trying to dispose element sink, but it is in READY instead of the NULL state. You need to explicitly set Elements to the NULL state before dropping the final reference [...]
Bonus question: is it possible to pipe audio into a GStreamer program, something like "aplay example.mp3 > gstreamerCprogram"?
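A sketch of how snd-aloop is usually addressed, for what it's worth: alsasrc expects an ALSA device string, while the alsa_output...monitor name is a PulseAudio source name (it would go with pulsesrc instead). With the loopback module loaded, whatever is played into hw:Loopback,0,N can be captured from hw:Loopback,1,N:
/* Sketch, assuming snd-aloop is loaded (modprobe snd-aloop) and aplay
 * plays into the loopback side, e.g.: aplay -D hw:Loopback,0,0 example.wav */
data.source = gst_element_factory_make ("alsasrc", "source");
g_object_set (data.source, "device", "hw:Loopback,1,0", NULL);
As for the bonus question: fdsrc is the element that reads from a file descriptor (stdin by default), so a shell pipe into a GStreamer pipeline is possible in principle, though aplay writes to an audio device rather than to stdout.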
Here is the code:
#include <gst/gst.h>
#define AUDIOFREQ 32000
/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
GstElement *pipeline;
GstElement *source;
GstElement *convert;
GstElement *sink;
} CustomData;
/* Handler for the pad-added signal */
static void pad_added_handler (GstElement *src, GstPad *pad, CustomData *data);
static gboolean message_handler (GstBus *bus, GstMessage *message, gpointer data){
if(message->type == GST_MESSAGE_EOS){
g_printerr("EOS\n");
}
if(message->type == GST_MESSAGE_ELEMENT){
const GstStructure *s = gst_message_get_structure (message);
const gchar *name = gst_structure_get_name(s);
if(strcmp(name, "spectrum") == 0){
const GValue *magnitudes;
gdouble freq;
magnitudes = gst_structure_get_value (s,"magnitude");
int i = 0;
for(i = 0; i < 20; ++i){
freq = (gdouble)((AUDIOFREQ / 2) * i + AUDIOFREQ / 4) / 20; /* center frequency of band i */
if(freq > 10000){
g_printerr("%f\n",freq);
}else{
g_printerr("|");
}
}
}
}
return TRUE; /* keep the bus watch installed */
}
int main(int argc, char *argv[]) {
CustomData data;
GstCaps *caps;
GstElement *spectrum;
GstBus *bus;
GstMessage *msg;
GstStateChangeReturn ret;
gboolean terminate = FALSE;
/* Initialize GStreamer */
gst_init (&argc, &argv);
//____________________________HERE IS THE PROBLEM________________________
//data.source = gst_element_factory_make ("uridecodebin", "source");
//g_object_set (data.source, "uri", "file:///home/pi/example.mp3", NULL);
data.source = gst_element_factory_make ("alsasrc", "source");
g_object_set(data.source, "device", "alsa_output.platform-snd_aloop.0.analog-stereo.monitor",NULL);
//____________________________HERE ENDS THE PROBLEM________________________
data.convert = gst_element_factory_make ("audioconvert", "convert");
data.sink = gst_element_factory_make ("autoaudiosink", "sink");
spectrum = gst_element_factory_make ("spectrum", "spectrum");
caps = gst_caps_new_simple ("audio/x-raw", "rate",G_TYPE_INT, AUDIOFREQ, NULL);
//SET SOME VARIABLES ON SPECTRUM
g_object_set (G_OBJECT (spectrum), "bands", 20, "post-messages", TRUE, "message-phase", TRUE, NULL);
/* Create the empty pipeline */
data.pipeline = gst_pipeline_new ("test-pipeline");
if (!data.pipeline || !data.source || !data.convert || !data.sink || !caps || !spectrum) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Build the pipeline. Note that we are NOT linking the source at this
* point. We will do it later. */
gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.convert , spectrum,data.sink, NULL);
if (!gst_element_link_many (data.convert, spectrum, data.sink, NULL)) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (data.pipeline);
return -1;
}
/* Connect to the pad-added signal */
g_signal_connect (data.source, "pad-added", G_CALLBACK (pad_added_handler), &data);
/* Start playing */
ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (data.pipeline);
return -1;
}
GMainLoop *loop;
/* Listen to the bus */
bus = gst_element_get_bus (data.pipeline);
gst_bus_add_watch(bus, message_handler, NULL);
loop = g_main_loop_new (NULL,FALSE);
g_main_loop_run(loop);
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (data.pipeline, GST_STATE_NULL);
gst_object_unref (data.pipeline);
return 0;
}
/* This function will be called by the pad-added signal */
static void pad_added_handler (GstElement *src, GstPad *new_pad, CustomData *data) {
GstPad *sink_pad = gst_element_get_static_pad (data->convert, "sink");
GstPadLinkReturn ret;
GstCaps *new_pad_caps = NULL;
GstStructure *new_pad_struct = NULL;
const gchar *new_pad_type = NULL;
g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad), GST_ELEMENT_NAME (src));
/* If our converter is already linked, we have nothing to do here */
if (gst_pad_is_linked (sink_pad)) {
g_print (" We are already linked. Ignoring.\n");
goto exit;
}
/* Check the new pad's type */
new_pad_caps = gst_pad_query_caps (new_pad, NULL);
new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
new_pad_type = gst_structure_get_name (new_pad_struct);
if (!g_str_has_prefix (new_pad_type, "audio/x-raw")) {
g_print (" It has type '%s' which is not raw audio. Ignoring.\n", new_pad_type);
goto exit;
}
/* Attempt the link */
ret = gst_pad_link (new_pad, sink_pad);
if (GST_PAD_LINK_FAILED (ret)) {
g_print (" Type is '%s' but link failed.\n", new_pad_type);
} else {
g_print (" Link succeeded (type '%s').\n", new_pad_type);
}
exit:
/* Unreference the new pad's caps, if we got them */
if (new_pad_caps != NULL)
gst_caps_unref (new_pad_caps);
/* Unreference the sink pad */
gst_object_unref (sink_pad);
}
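As an aside on the spectrum handler further up: it fetches the "magnitude" value but never indexes into it. A sketch of unpacking the per-band magnitudes, assuming GStreamer 1.x, where spectrum posts them as a GValue list of gfloat dB values:
const GValue *mags = gst_structure_get_value (s, "magnitude");
guint n_bands = gst_value_list_get_size (mags);
for (guint i = 0; i < n_bands; ++i) {
    const GValue *v = gst_value_list_get_value (mags, i);
    g_printerr ("band %u: %.1f dB\n", i, g_value_get_float (v));
}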
Trying to make a GStreamer application from the pipeline: gst-launch-1.0 playbin uri=rtsp:// video-sink="videoconvert ! video/x-raw,width=720,height=480 ! ximagesink"
I am getting an error linking the elements. Posting my code:
#include <gst/gst.h>
int main(int argc, char *argv[]) {
GstElement *source, *videosink, *pipeline, *videoconvert;
GstCaps *capsFilter;
GstBus *bus;
GstMessage *msg;
GstPad *pad;
gboolean link_ok;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create Elements */
pipeline = gst_pipeline_new("my-pipeline");
source = gst_element_factory_make ("playbin", "source");
videoconvert = gst_element_factory_make("videoconvert", "convert");
videosink = gst_element_factory_make("ximagesink", "autovideosink");
/* set property value */
g_object_set (source, "uri", "rtsp:<file location>", NULL);
if (!pipeline || !source || !videoconvert || !videosink)
{
g_printerr ("Not all elements could be created.\n");
return -1;
}
gst_bin_add_many (GST_BIN(pipeline), videoconvert, videosink, NULL);
capsFilter = gst_caps_new_simple("video/x-raw",
"width", G_TYPE_INT, 176,
"height", G_TYPE_INT, 144,
NULL);
link_ok = gst_element_link_filtered(videoconvert,videosink, capsFilter);
gst_caps_unref (capsFilter);
if (!link_ok) {
g_warning ("Failed to link element1 and element2!");
}
if (gst_element_link_many( videoconvert, videosink, NULL) != TRUE) {
g_print ("Failed to link some elements .....1 !\n");
gst_object_unref (pipeline);
return -1;
}
/* Start playing */
gst_element_set_state (source, GST_STATE_PLAYING);
/* Wait until error or EOS */
bus = gst_element_get_bus (source);
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
It should be noted that the playbin element builds a full pipeline from source to sink by itself, so the pipeline without any sink configuration plays perfectly:
gst-launch-1.0 playbin uri=rtsp://127.0.0.1:8551/test
The correct pipeline that you want to create is this:
gst-launch-1.0 playbin uri=rtsp://127.0.0.1:8551/test video-sink="videoconvert ! video/x-raw,width=320,height=240 ! ximagesink"
To embed this in a GStreamer application, it is not necessary to link all the elements yourself. Instead, build a custom video-output bin and set it as playbin's video-sink property: create a bin, link the necessary elements inside it, add a GhostPad to the bin that targets the sink pad of the first element within the bin, and then tell playbin to use the bin via its video-sink property.
This is the result:
#include <gst/gst.h>
int main(int argc, char *argv[]) {
GstElement *source, *videosink, *pipeline, *videoconvert, *customoutput;
GstCaps *capsFilter;
GstBus *bus;
GstMessage *msg;
GstPad *pad;
gboolean add_ok;
gboolean link_ok;
GstStateChangeReturn ret;
GMainLoop *loop;
/* Initialize GStreamer */
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
/* Create Elements */
pipeline = gst_pipeline_new("my-pipeline");
source = gst_element_factory_make ("playbin", "source");
videoconvert = gst_element_factory_make("videoconvert", "videoconvert");
capsFilter = gst_caps_new_simple("video/x-raw",
"width", G_TYPE_INT, 320,
"height", G_TYPE_INT, 240,
NULL);
videosink = gst_element_factory_make("ximagesink", "videosink");
customoutput = gst_bin_new("customoutput");
// It is possible to create the bin like this
// Ghost pads on the bin for unlinked source or sink pads within the bin can automatically be created
// customoutput = gst_parse_bin_from_description ("videoconvert ! video/x-raw,width=320 ! ximagesink", TRUE, NULL);
gst_bin_add_many (GST_BIN (customoutput), videoconvert, videosink, NULL);
link_ok = gst_element_link_filtered(videoconvert,videosink, capsFilter);
gst_caps_unref (capsFilter);
if (!link_ok) {
g_warning ("Failed to link element1 and element2!");
}
GstPad *sinkpad,*ghost_sinkpad;
sinkpad = gst_element_get_static_pad (videoconvert, "sink");
ghost_sinkpad = gst_ghost_pad_new ("sink", sinkpad);
gst_pad_set_active (ghost_sinkpad, TRUE);
gst_element_add_pad (customoutput, ghost_sinkpad);
/* set property value */
g_object_set (source, "video-sink", customoutput, NULL);
g_object_set (source, "uri", "rtsp://127.0.0.1:8551/test", NULL);
if (!pipeline || !source || !videoconvert || !capsFilter || !videosink || !customoutput)
{
g_printerr ("Not all elements could be created.\n");
return -1;
}
gst_bin_add_many (GST_BIN(pipeline), source,NULL);
/* Start playing */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (pipeline);
return -1;
}
g_print ("Running...\n");
g_main_loop_run (loop);
/* Wait until error or EOS */
//bus = gst_element_get_bus (pipeline);
//msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Free resources */
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
g_main_loop_unref (loop);
return 0;
}
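For completeness, the shortcut mentioned in the comments above works as a drop-in replacement for the hand-built bin; a sketch:
GError *err = NULL;
GstElement *vsink = gst_parse_bin_from_description (
        "videoconvert ! video/x-raw,width=320,height=240 ! ximagesink",
        TRUE /* auto-create ghost pads for unlinked pads */, &err);
if (vsink == NULL) {
    g_printerr ("Failed to build the video sink bin: %s\n", err->message);
    g_clear_error (&err);
    return -1;
}
g_object_set (source, "video-sink", vsink, NULL);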
I'm trying to take several snapshots from a source using GStreamer. With the following code I succeed in writing 9 files, but the source then emits an EOS (which is actually normal; it's caused by the num-buffers argument):
#include <gst/gst.h>
/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
GstElement *pipeline;
GstElement *source;
GstElement *convert;
GstElement *sink;
GstElement *encode;
} CustomData;
int main(int argc, char *argv[]) {
CustomData data;
GstBus *bus;
GstMessage *msg;
GstStateChangeReturn ret;
gboolean terminate = FALSE;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements */
data.source = gst_element_factory_make ("videotestsrc", "source");
data.convert = gst_element_factory_make ("ffmpegcolorspace", "convert");
data.encode = gst_element_factory_make ("ffenc_pgm", "encode");
data.sink = gst_element_factory_make ("multifilesink", "sink");
/* Create the empty pipeline */
data.pipeline = gst_pipeline_new ("test-pipeline");
if (!data.pipeline || !data.source || !data.convert || !data.encode || !data.sink) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Build the pipeline. Note that we are NOT linking the source at this
* point. We will do it later. */
gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.convert , data.encode, data.sink, NULL);
if (!gst_element_link_many (data.source, data.convert, data.encode, data.sink, NULL)) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (data.pipeline);
return -1;
}
/* Modify the source's properties */
g_object_set (data.source, "pattern", 0, NULL);
g_object_set (data.source, "num-buffers", 9, NULL);
g_object_set(data.sink, "location", "frame%05d.pgm", NULL);
/* Start playing */
ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (data.pipeline);
return -1;
}
/* Wait until error or EOS */
bus = gst_element_get_bus (data.pipeline);
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
break;
case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n");
break;
default:
/* We should not reach here because we only asked for ERRORs and EOS */
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (msg);
}
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (data.pipeline, GST_STATE_NULL);
gst_object_unref (data.pipeline);
return 0;
}
But my problem is that I want the live stream to continue after those 9 snapshots. I looked into the tee and queue capabilities but couldn't get anywhere. I think I need a dynamic pipeline with a multifilesink element that I pause and play, but how do I tell it to write only 9 files? (max-files=9 doesn't work, because the generated files just get overwritten.)
Thanks
Sure. You need to add a probe to count buffers and remove the elements once you no longer need them.
I added a few fields to your struct:
int count;
GstPad *blockpad;
GstElement *fakesink;
I created one more sink to replace the end of the pipeline once the 9 snapshots are saved:
data.fakesink = gst_element_factory_make ("fakesink", "fakesink");
I added a probe to the src pad of data.convert:
data.count = 0;
data.blockpad = gst_element_get_static_pad (data.convert, "src");
gst_pad_add_probe (data.blockpad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM | GST_PAD_PROBE_TYPE_BUFFER,
pad_probe_cb, &data, NULL);
I use GStreamer 1.x, so I replaced the ffenc_pgm element with avenc_pgm and the ffmpegcolorspace element with identity:
#include <stdio.h>
#include <gst/gst.h>
/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
int count;
GstPad *blockpad;
GstElement *pipeline;
GstElement *source;
GstElement *convert;
GstElement *sink;
GstElement *fakesink;
GstElement *encode;
} CustomData;
static GstPadProbeReturn
pad_probe_cb (GstPad * pad, GstPadProbeInfo * info, gpointer user_data) {
CustomData *data = user_data;
data->count++;
printf("%d\n", data->count);
if (data->count > 9)
{
gst_element_set_state (data->encode, GST_STATE_NULL);
gst_bin_remove (GST_BIN (data->pipeline), data->encode);
gst_element_set_state (data->sink, GST_STATE_NULL);
gst_bin_remove (GST_BIN (data->pipeline), data->sink);
gst_bin_add (GST_BIN (data->pipeline), data->fakesink);
gst_element_link (data->convert, data->fakesink);
gst_element_set_state (data->fakesink, GST_STATE_PLAYING);
gst_pad_remove_probe (pad, GST_PAD_PROBE_INFO_ID (info));
return GST_PAD_PROBE_OK; /* the probe was already removed by id above */
}
else
return GST_PAD_PROBE_PASS;
}
int main(int argc, char *argv[]) {
CustomData data;
GstBus *bus;
GstMessage *msg;
GstStateChangeReturn ret;
gboolean terminate = FALSE;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements */
data.source = gst_element_factory_make ("videotestsrc", "source");
data.convert = gst_element_factory_make ("identity", "convert");
data.encode = gst_element_factory_make ("avenc_pgm", "encode");
data.sink = gst_element_factory_make ("multifilesink", "sink");
data.fakesink = gst_element_factory_make ("fakesink", "fakesink");
/* Create the empty pipeline */
data.pipeline = gst_pipeline_new ("test-pipeline");
if (!data.pipeline || !data.source || !data.convert || !data.encode || !data.sink || !data.fakesink) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Build the pipeline. Note that we are NOT linking the source at this
* point. We will do it later. */
gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.convert , data.encode, data.sink, NULL);
if (!gst_element_link_many (data.source, data.convert, data.encode, data.sink, NULL)) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (data.pipeline);
return -1;
}
/* Modify the source's properties */
g_object_set (data.source, "pattern", 0, NULL);
g_object_set (data.source, "num-buffers", 20, NULL);
g_object_set (data.sink, "location", "frame%05d.pgm", NULL);
data.count = 0;
data.blockpad = gst_element_get_static_pad (data.convert, "src");
gst_pad_add_probe (data.blockpad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM | GST_PAD_PROBE_TYPE_BUFFER,
pad_probe_cb, &data, NULL);
/* Start playing */
ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (data.pipeline);
return -1;
}
/* Wait until error or EOS */
bus = gst_element_get_bus (data.pipeline);
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
break;
case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n");
break;
default:
/* We should not reach here because we only asked for ERRORs and EOS */
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (msg);
}
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (data.pipeline, GST_STATE_NULL);
gst_object_unref (data.pipeline);
return 0;
}
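An alternative sketch, in case rewiring the pipeline feels heavy: keep all elements in place and simply drop buffers after the ninth one with a plain buffer probe (no blocking needed). Note that this starves everything downstream of the probe, so it only fits when the snapshot branch hangs off a tee and the live branch runs separately:
static GstPadProbeReturn
drop_after_nine (GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
{
  CustomData *data = user_data;
  if (++data->count > 9)
    return GST_PAD_PROBE_DROP;  /* discard this buffer and all later ones */
  return GST_PAD_PROBE_PASS;    /* let the first nine through */
}
/* attached with:
 * gst_pad_add_probe (data.blockpad, GST_PAD_PROBE_TYPE_BUFFER,
 *     drop_after_nine, &data, NULL); */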
I have the following pipelines; one sends voice over a UDP port and the other receives it on the same port on the receiver side:
gst-launch-1.0 -v alsasrc ! audioconvert \
    ! audio/x-raw,channels=2,depth=16,width=16,rate=44100 \
    ! rtpL16pay ! udpsink host=127.0.0.1 port=5000    # sender
and
gst-launch-1.0 udpsrc port=5000 \
    ! "application/x-rtp, media=(string)audio, clock-rate=(int)44100, encoding-name=(string)L16, channels=(int)2, payload=(int)96" \
    ! rtpL16depay ! audioconvert ! alsasink    # receiver
These pipelines work perfectly.
Now I am trying to write source code using the GStreamer SDK that does the same thing. I have come this far:
#include <gst/gst.h>
#include <string.h>
int main(int argc, char *argv[]) {
GstElement *pipeline, *source, *audiosink,*rtppay,*rtpdepay,*filter,*filter1,*conv,*conv1,*udpsink,*udpsrc,*receive_resample;
GstBus *bus;
GstMessage *msg;
GstCaps *filtercaps;
GstStateChangeReturn ret;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements */
source = gst_element_factory_make ("alsasrc", "source");
conv= gst_element_factory_make ("audioconvert", "conv");
conv1= gst_element_factory_make ("audioconvert", "conv1");
filter=gst_element_factory_make("capsfilter","filter");
rtppay=gst_element_factory_make("rtpL16pay","rtppay");
rtpdepay=gst_element_factory_make("rtpL16depay","rtpdepay");
udpsink=gst_element_factory_make("udpsink","udpsink");
audiosink = gst_element_factory_make ("autoaudiosink", "audiosink");
receive_resample = gst_element_factory_make("audioresample", NULL);
udpsrc=gst_element_factory_make("udpsrc",NULL);
filter1=gst_element_factory_make("capsfilter","filter1"); /* was "filter": two elements must not share a name */
g_object_set(udpsrc,"port",5000,NULL);
g_object_set (G_OBJECT (udpsrc), "caps", gst_caps_from_string("application/x-rtp,media=audio,payload=96,clock-rate=44100,encoding-name=L16,channels=2"), NULL);
/* Create the empty pipeline */
pipeline = gst_pipeline_new ("test-pipeline");
if (!pipeline || !source || !filter || !conv || !rtppay || !udpsink || !udpsrc || !rtpdepay || !conv1 || !receive_resample || !audiosink) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
g_object_set(G_OBJECT(udpsink),"host","127.0.0.1",NULL);
g_object_set(G_OBJECT(udpsink),"port",5000,NULL);
filtercaps = gst_caps_new_simple ("audio/x-raw",
"channels", G_TYPE_INT, 2,
"width", G_TYPE_INT, 16,
"depth", G_TYPE_INT, 16,
"rate", G_TYPE_INT, 44100,
NULL);
g_object_set (G_OBJECT (filter), "caps", filtercaps, NULL);
gst_caps_unref (filtercaps);
filtercaps = gst_caps_new_simple ("application/x-rtp",
"media",G_TYPE_STRING,"audio",
"clock-rate",G_TYPE_INT,44100,
"encoding-name",G_TYPE_STRING,"L16",
"channels", G_TYPE_INT, 2,
"payload",G_TYPE_INT,96,
NULL);
g_object_set (G_OBJECT (filter1), "caps", filtercaps, NULL);
gst_caps_unref (filtercaps);
/* Build the pipeline */
gst_bin_add_many (GST_BIN (pipeline), source,filter,conv,rtppay,udpsink, NULL);
if (gst_element_link_many (source,filter,conv,rtppay,udpsink, NULL) != TRUE) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (pipeline);
return -1;
}
gst_bin_add_many (GST_BIN (pipeline),udpsrc,rtpdepay,conv1,receive_resample,audiosink,NULL);
if (gst_element_link_many (udpsrc,rtpdepay,conv1,receive_resample,audiosink,NULL) != TRUE) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (pipeline);
return -1;
}
/* Modify the source's properties */
// g_object_set (source, "pattern", 0, NULL);
/* Start playing */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (pipeline);
return -1;
}
/* Wait until error or EOS */
bus = gst_element_get_bus (pipeline);
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
break;
case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n");
break;
default:
/* We should not reach here because we only asked for ERRORs and EOS */
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (msg);
}
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
return 0;
}
But somehow I don't receive any voice on the receiver, and I don't get errors of any kind. Any ideas why this is happening?
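One guess worth checking: the width/depth caps fields in the code are GStreamer 0.10 syntax, while the working pipelines use gst-launch-1.0. In 1.0, raw-audio caps carry a format string instead, and rtpL16pay expects big-endian 16-bit samples; a sketch of a 1.0-style capsfilter:
filtercaps = gst_caps_new_simple ("audio/x-raw",
        "format",   G_TYPE_STRING, "S16BE",
        "layout",   G_TYPE_STRING, "interleaved",
        "channels", G_TYPE_INT,    2,
        "rate",     G_TYPE_INT,    44100,
        NULL);
g_object_set (G_OBJECT (filter), "caps", filtercaps, NULL);
gst_caps_unref (filtercaps);
Also note that the launch line places the caps after audioconvert (alsasrc ! audioconvert ! caps ! rtpL16pay), while the code links source, filter, conv, rtppay; matching the launch order lets audioconvert do the conversion to S16BE.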
I am trying to simulate the following gstreamer pipeline using the C API:
gst-launch -e videomixer name=mix ! ffmpegcolorspace ! xvimagesink \
videotestsrc pattern=1 ! video/x-raw-yuv,width=100,height=100 ! videobox border-alpha=0 top=0 left=0 ! mix. \
videotestsrc pattern=0 ! video/x-raw-yuv,width=100,height=100 ! videobox border-alpha=0 top=0 left=-100 ! mix.
So far I have:
#include <gst/gst.h>
#include <glib.h>
static gboolean
bus_call (GstBus *bus,
GstMessage *msg,
gpointer data)
{
GMainLoop *loop = (GMainLoop *) data;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_EOS:
g_print ("End of stream\n");
g_main_loop_quit (loop);
break;
case GST_MESSAGE_ERROR: {
gchar *debug;
GError *error;
gst_message_parse_error (msg, &error, &debug);
g_free (debug);
g_printerr ("Error: %s\n", error->message);
g_error_free (error);
g_main_loop_quit (loop);
break;
}
default:
break;
}
return TRUE;
}
int
main (int argc,
char *argv[])
{
GMainLoop *loop;
GstElement *pipeline;
GstElement *source1,*source2;
GstElement *scale,*filter;
GstElement *videobox1,*videobox2; //just one.
GstElement *mixer,*clrspace,*sink;
GstCaps *filtercaps;
GstBus *bus;
/* Initialisation */
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
/* Check input arguments */
/*if (argc != 2) {
g_printerr ("Usage: %s <Ogg/Vorbis filename>\n", argv[0]);
return -1;
}*/
//gst-launch videotestsrc pattern=snow ! ximagesink
/* Create gstreamer elements */
pipeline = gst_pipeline_new ("player");
source1 = gst_element_factory_make ("videotestsrc", "source1");
source2 = gst_element_factory_make ("videotestsrc", "source2");
// source2 = gst_element_factory_make ("uridecodebin", "file-source2");
scale = gst_element_factory_make ("videoscale", "scale");
filter = gst_element_factory_make("capsfilter","filter");
videobox1 = gst_element_factory_make ("videobox", "videobox1");
videobox2 = gst_element_factory_make ("videobox", "videobox2");
mixer = gst_element_factory_make ("videomixer", "mixer");
clrspace = gst_element_factory_make ("ffmpegcolorspace", "clrspace");
// demuxer = gst_element_factory_make ("oggdemux", "ogg-demuxer");
// decoder = gst_element_factory_make ("vorbisdec", "vorbis-decoder");
// conv = gst_element_factory_make ("audioconvert", "converter");
sink = gst_element_factory_make ("xvimagesink", "sink");
/*if (!pipeline || !source || !demuxer || !decoder || !conv || !sink) {
g_printerr ("One element could not be created. Exiting.\n");
return -1;
}*/
if (!pipeline || !source1 || !source2 || !sink) {
g_printerr ("One element could not be created. Exiting.\n");
return -1;
}
filtercaps = gst_caps_new_simple ("video/x-raw-yuv",
"width", G_TYPE_INT, 200,
"height", G_TYPE_INT, 100,
NULL);
g_object_set (G_OBJECT (filter), "caps", filtercaps, NULL);
//gst_caps_unref (filtercaps);
g_object_set(videobox1,"border-alpha",0,"top",0,"left",0,NULL);
g_object_set(videobox2,"border-alpha",0,"top",0,"left",-200,NULL);
/* Set up the pipeline */
/* we set the input filename to the source element */
g_object_set (G_OBJECT (source1), "pattern", 0, NULL);
g_object_set (G_OBJECT (source2), "pattern", 1, NULL);
/* we add a message handler */
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
/* we add all elements into the pipeline */
gst_bin_add_many (GST_BIN (pipeline),
source1,filter,videobox1,mixer,clrspace, sink, source2,videobox2, NULL);
/* we link the elements together */
//gst_element_link_many (source1, scale, filter, videobox1, mixer, clrspace, sink);
//gst_element_link_many (source2, scale, filter, videobox2, mixer, clrspace, sink);
gst_element_link_many (source1, filter, videobox1, mixer, clrspace, sink);
gst_element_link_many (source2, filter, videobox2, mixer, clrspace, sink);
/* Set the pipeline to "playing" state*/
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* Iterate */
g_print ("Running...\n");
g_main_loop_run (loop);
/* Out of the main loop, clean up nicely */
g_print ("Returned, stopping playback\n");
gst_element_set_state (pipeline, GST_STATE_NULL);
g_print ("Deleting pipeline\n");
gst_object_unref (GST_OBJECT (pipeline));
return 0;
}
I have also set debugging on: export GST_DEBUG=3
When I run my program I get the following error:
Running...
0:00:00.178663884 4797 0x8937020 WARN basesrc gstbasesrc.c:2582:gst_base_src_loop:<source2> error: Internal data flow error.
0:00:00.178766444 4797 0x8937020 WARN basesrc gstbasesrc.c:2582:gst_base_src_loop:<source2> error: streaming task paused, reason not-linked (-1)
Error: Internal data flow error.
Returned, stopping playback
0:00:00.202571746 4797 0x893ae00 WARN basetransform gstbasetransform.c:1627:gst_base_transform_prepare_output_buffer:<clrspace> pad-alloc failed: wrong-state
0:00:00.202645907 4797 0x893ae00 WARN basetransform gstbasetransform.c:2335:gst_base_transform_handle_buffer:<clrspace> could not get buffer from pool: wrong-state
Deleting pipeline
Why is it complaining about source2 not-linked?
A little late, but may be helpful:
If you look at the documentation for the videomixer element, you'll see that its sink pads are request pads. You need to request these pads before linking them.
/* Manually link the mixer, which has "Request" pads */
mixer_sink_pad_template = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (mixer), "sink_%u");
mixer_sink_pad = gst_element_request_pad (mixer, mixer_sink_pad_template, NULL, NULL);
src_pad = gst_element_get_static_pad (videobox1, "src");
gst_pad_link (src_pad, mixer_sink_pad);
Similarly, request pads are created for as many streams as you want. The remaining links can stay as gst_element_link_many calls, but note that each branch needs its own capsfilter instance, since a single element can only sit in one branch:
gst_element_link_many (source1, filter, videobox1, mixer, clrspace, sink, NULL);
gst_element_link_many (source2, filter2, videobox2, mixer, NULL); /* filter2: a second capsfilter with the same caps (hypothetical, not in the original code) */
Please also look at the xpos, ypos, and zorder properties on the videomixer pads; you can spare the videobox elements that way and gain performance.
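A sketch of those pad properties, assuming GStreamer 1.x naming: the second stream can be shifted 200 px to the right directly on its mixer request pad, with no videobox at all (mixer_sink_pad2 stands for the request pad obtained for the second stream, as above):
g_object_set (mixer_sink_pad2, "xpos", 200, "ypos", 0, "zorder", 1, NULL);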