How to modify playback speed of audio stream in Gstreamer? - c

Hi everyone I am trying to change the playback speed of an audio file using the gstreamer library in c. I've followed most of the tutorials on the gstreamer website but the only thing that is not working is the playback speed.
The way it is set up right now, the speed should be doubled when a '.' is encountered but nothing happens. Can any experienced gstreamer users provide some insight?
/* All state shared between main(), the keyboard watch and the bus watch. */
typedef struct bindata {
GMainLoop *loop; /* main event loop; quit from bus_call() on EOS/error */
GstElement *pipeline, *source, *mp3decoder, *volume, *pulseout; /* pipeline and its elements */
gboolean playing; /* TRUE while the pipeline is in GST_STATE_PLAYING */
} bindata;
/* Single global instance; bus_call() reads this instead of its user-data argument. */
static bindata data;
/*
 * Bus watch callback: quits the main loop on end-of-stream or on an error
 * message; all other message types are ignored.
 *
 * Fix: the user-data parameter of a GstBusFunc is a plain `gpointer`, not
 * `gpointer *`; the mismatched signature made the function pointer passed
 * to gst_bus_add_watch() incompatible. The callback reads the global
 * `data` because NULL is passed as user data at the call site.
 */
static gboolean bus_call(GstBus *bus, GstMessage *msg, gpointer misc){
switch (GST_MESSAGE_TYPE(msg)){
case GST_MESSAGE_EOS: {
g_message("End of stream.\n");
g_main_loop_quit(data.loop);
break;
}
case GST_MESSAGE_ERROR:{
GError *error = NULL; /* filled in by gst_message_parse_error() */
gst_message_parse_error(msg, &error, NULL);
g_printerr("%s\n", error->message);
g_error_free(error);
g_main_loop_quit(data.loop);
break;
}
default: break;
}
/* Returning TRUE keeps the bus watch installed. */
return TRUE;
}
static gboolean keypress (GIOChannel *src, GIOCondition cond, bindata *data){
int c;
gdouble vol;
GstFormat format = GST_FORMAT_TIME;
//if(g_io_channel_read_unichar(src, str, NULL) != G_IO_STATUS_NORMAL){
if((c = getchar()) == EOF ){
return TRUE;
}
switch(c){
case '+':
g_object_get(data->volume, "volume", &vol,NULL);
if (vol >= 10) break;
g_object_set (data->volume, "volume", vol + 0.1, NULL);
break;
case '-':
g_object_get(data->volume, "volume", &vol, NULL);
if (vol <= 0.1) break;
g_object_set (data->volume, "volume", vol - 0.1, NULL);
break;
case '.':
g_print("speed up \n");
gst_element_send_event(data->pulseout, gst_event_new_step(format, 20, 2.0, TRUE, FALSE));
break;
case ',':
g_print("speed down \n");
break;
case ' ':
data->playing = !data->playing;
gst_element_set_state (data->pipeline, data->playing ? GST_STATE_PLAYING : GST_STATE_PAUSED);
break;
default:
break;
}
return TRUE;
}
int main(int argc, char *argv[]){
GstBus *bus;
guint bus_watch_id;
GIOChannel *io_stdin;
gst_init(&argc, &argv);
memset (&data, 0, sizeof(data));
data.loop = g_main_loop_new(NULL, false);
if(argc != 2){
g_printerr("Usage: ./play <URI: mp3 file>");
return -1;
}
io_stdin = g_io_channel_unix_new (fileno (stdin));
g_io_add_watch (io_stdin, G_IO_IN, (GIOFunc) keypress, &data);
data.pipeline = gst_pipeline_new ("audio-player");
data.source = gst_element_factory_make ("filesrc", "file source");
data.mp3decoder = gst_element_factory_make ("mad", "mad mp3");
data.volume = gst_element_factory_make ("volume", "volume");
data.pulseout = gst_element_factory_make ("pulsesink", "pulse audio");
if(!data.pipeline || !data.source || !data.mp3decoder || !data.pulseout || !data.volume) {
g_printerr("Some element(s) could not be created. Exiting. \n");
return -1;
}
g_object_set (G_OBJECT (data.source), "location", argv[1], NULL);
g_object_set (G_OBJECT(data.volume), "volume", 0.01, NULL);
bus = gst_pipeline_get_bus(GST_PIPELINE(data.pipeline));
bus_watch_id = gst_bus_add_watch(bus, bus_call, NULL);
gst_object_unref(bus);
gst_bin_add_many(GST_BIN (data.pipeline), data.source, data.mp3decoder, data.volume, data.pulseout, NULL);
gst_element_link_many (data.source, data.mp3decoder, data.volume, data.pulseout, NULL);
gst_element_set_state(data.pipeline, GST_STATE_PLAYING);
data.playing = TRUE;
g_print ("Running...\n");
g_main_loop_run(data.loop);
g_print ("ended. \n");
gst_element_set_state(data.pipeline, GST_STATE_NULL);
gst_object_unref(GST_OBJECT(data.pipeline));
g_source_remove (bus_watch_id);
g_main_loop_unref (data.loop);
return 0;
}

Controlling playback speed is done through seeking, with gst_event_new_seek or gst_element_seek, by setting the rate property, see the documentation at https://developer.gnome.org/gstreamer/stable/gstreamer-GstEvent.html#gst-event-new-seek and http://gstreamer.freedesktop.org/data/doc/gstreamer/head/gstreamer/html/GstElement.html#gst-element-seek

the following, while oriented on video rather than audio,
shows how to change the playback speed.
it is from:
<http://docs.gstreamer.com/display/GstSDK/Basic+tutorial+13%3A+Playback+speed>
#include <string.h>
#include <gst/gst.h>
/* State shared between main(), the keyboard handler and send_seek_event(). */
typedef struct _CustomData {
GstElement *pipeline; /* the playbin pipeline */
GstElement *video_sink; /* lazily fetched sink; target for seek/step events */
GMainLoop *loop; /* GLib main loop */
gboolean playing; /* Playing or Paused */
gdouble rate; /* Current playback rate (can be negative) */
} CustomData;
/* Send seek event to change rate */
/*
 * Applies data->rate to the running pipeline by sending a flushing,
 * accurate seek to the video sink, anchored at the current position.
 *
 * NOTE(review): passing `&format` matches the GStreamer 0.10 signature of
 * gst_element_query_position(); in 1.x the format is passed by value —
 * confirm which version this builds against.
 */
static void send_seek_event (CustomData *data) {
gint64 position;
GstFormat format = GST_FORMAT_TIME;
GstEvent *seek_event;
/* Obtain the current position, needed for the seek event */
if (!gst_element_query_position (data->pipeline, &format, &position)) {
g_printerr ("Unable to retrieve current position.\n");
return;
}
/* Create the seek event */
/* Forward playback: segment runs from the current position onward. */
if (data->rate > 0) {
seek_event = gst_event_new_seek (data->rate, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE,
GST_SEEK_TYPE_SET, position, GST_SEEK_TYPE_NONE, 0);
} else {
/* Reverse playback: segment runs from the start up to the current position. */
seek_event = gst_event_new_seek (data->rate, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE,
GST_SEEK_TYPE_SET, 0, GST_SEEK_TYPE_SET, position);
}
if (data->video_sink == NULL) {
/* If we have not done so, obtain the sink through which we will send the seek events */
g_object_get (data->pipeline, "video-sink", &data->video_sink, NULL);
}
/* Send the event */
gst_element_send_event (data->video_sink, seek_event);
g_print ("Current rate: %g\n", data->rate);
}
/* Process keyboard input */
/*
 * stdin watch (GIOFunc): reads one line and dispatches on its first
 * character. 'p' toggles pause, 'S'/'s' double/halve the rate, 'd' flips
 * playback direction, 'n' steps one frame, 'q' quits. Always returns TRUE
 * so the watch stays installed.
 */
static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomData *data) {
gchar *str = NULL;
if (g_io_channel_read_line (source, &str, NULL, NULL, NULL) != G_IO_STATUS_NORMAL) {
return TRUE;
}
/* Dispatch on the lower-cased key; the original case of str[0] is still
 * inspected below to tell 'S' (speed up) from 's' (slow down). */
switch (g_ascii_tolower (str[0])) {
case 'p':
data->playing = !data->playing;
gst_element_set_state (data->pipeline, data->playing ? GST_STATE_PLAYING : GST_STATE_PAUSED);
g_print ("Setting state to %s\n", data->playing ? "PLAYING" : "PAUSE");
break;
case 's':
if (g_ascii_isupper (str[0])) {
data->rate *= 2.0;
} else {
data->rate /= 2.0;
}
send_seek_event (data);
break;
case 'd':
/* Toggle direction; the new (negated) rate takes effect via the seek. */
data->rate *= -1.0;
send_seek_event (data);
break;
case 'n':
if (data->video_sink == NULL) {
/* If we have not done so, obtain the sink through which we will send the step events */
g_object_get (data->pipeline, "video-sink", &data->video_sink, NULL);
}
/* Step exactly one buffer (one frame for video sinks) at the current rate. */
gst_element_send_event (data->video_sink,
gst_event_new_step (GST_FORMAT_BUFFERS, 1, data->rate, TRUE, FALSE));
g_print ("Stepping one frame\n");
break;
case 'q':
g_main_loop_quit (data->loop);
break;
default:
break;
}
g_free (str);
return TRUE;
}
/*
 * GStreamer SDK "Basic tutorial 13: Playback speed": plays a network clip
 * with playbin and lets the keyboard handler change rate/direction/step.
 *
 * NOTE(review): "playbin2" is the GStreamer 0.10 element name; under 1.x
 * the element is called "playbin" — confirm the target version.
 */
int main(int argc, char *argv[]) {
CustomData data;
GstStateChangeReturn ret;
GIOChannel *io_stdin;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Initialize our data structure (also zeroes video_sink, checked at exit) */
memset (&data, 0, sizeof (data));
/* Print usage map */
g_print (
"USAGE: Choose one of the following options, then press enter:\n"
" 'P' to toggle between PAUSE and PLAY\n"
" 'S' to increase playback speed, 's' to decrease playback speed\n"
" 'D' to toggle playback direction\n"
" 'N' to move to next frame (in the current direction, better in PAUSE)\n"
" 'Q' to quit\n");
/* Build the pipeline */
data.pipeline = gst_parse_launch ("playbin2 uri=http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL);
/* Add a keyboard watch so we get notified of keystrokes */
#ifdef _WIN32
io_stdin = g_io_channel_win32_new_fd (fileno (stdin));
#else
io_stdin = g_io_channel_unix_new (fileno (stdin));
#endif
g_io_add_watch (io_stdin, G_IO_IN, (GIOFunc)handle_keyboard, &data);
/* Start playing */
ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (data.pipeline);
return -1;
}
data.playing = TRUE;
data.rate = 1.0;
/* Create a GLib Main Loop and set it to run */
data.loop = g_main_loop_new (NULL, FALSE);
g_main_loop_run (data.loop);
/* Free resources */
g_main_loop_unref (data.loop);
g_io_channel_unref (io_stdin);
gst_element_set_state (data.pipeline, GST_STATE_NULL);
if (data.video_sink != NULL)
gst_object_unref (data.video_sink); /* ref acquired via g_object_get */
gst_object_unref (data.pipeline);
return 0;
}

Related

problem to play media with gstreamer and srt protocol, language c

I'm new to gstreamer and I'm trying to output the video on another port (with the srt protocol). So far I have done this and it doesn't work:
/* Elements shared with the pad-added callback. */
typedef struct _CustomData
{
GstElement *pipeline; /* top-level pipeline */
GstElement *source; /* uridecodebin; pads appear dynamically */
GstElement *sink; /* srt sink built from the target URI */
} CustomData;
static void pad_added_handler (GstElement * src, GstPad * pad, CustomData * data);
/*
 * uridecodebin -> srt sink forwarding demo. The source's pads only appear
 * once the stream is analysed, so linking happens in pad_added_handler().
 *
 * NOTE(review): the same "srt://my_uri" is used both as the source URI and
 * as the sink target — confirm these were meant to differ.
 */
int main (int argc, char *argv[]) {
CustomData data;
GstBus *bus;
GstMessage *msg;
GstStateChangeReturn ret;
gboolean terminate = FALSE;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements */
data.source = gst_element_factory_make ("uridecodebin", "source");
/* Build a sink element from the URI scheme (srtsink for srt://). */
data.sink = gst_element_make_from_uri (GST_URI_SINK,"srt://my_uri", NULL, NULL);
/* Create the empty pipeline */
data.pipeline = gst_pipeline_new ("test-pipeline");
if (!data.pipeline || !data.source || !data.sink) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Build the pipeline. Note that we are NOT linking the source at this point. We will do it later. */
gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.sink, NULL);
/* Set the URI to play */
g_object_set (data.source, "uri", "srt://my_uri", NULL);
/* Connect to the pad-added signal */
g_signal_connect (data.source, "pad-added", G_CALLBACK (pad_added_handler), &data);
/* Start playing */
ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (data.pipeline);
return -1;
}
/* Listen to the bus */
bus = gst_element_get_bus (data.pipeline);
do {
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* (message handling elided in the original post) */
...
} while (!terminate);
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (data.pipeline, GST_STATE_NULL);
gst_object_unref (data.pipeline);
return 0;
}
and pad_added_handler, the function will be called by the padd-added signal:
/* This function will be called by the pad-added signal */
/*
 * Links a newly exposed uridecodebin pad to the srt sink, but only when
 * the pad carries raw video.
 *
 * Fix: the rejection message said "not raw audio" while the check is for
 * "video/x-raw"; the text now matches the check.
 */
static void pad_added_handler (GstElement * src, GstPad * new_pad, CustomData * data)
{
GstPad *sink_pad = gst_element_get_static_pad (data->sink, "sink");
GstPadLinkReturn ret;
GstCaps *new_pad_caps = NULL;
GstStructure *new_pad_struct = NULL;
const gchar *new_pad_type = NULL;
g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad), GST_ELEMENT_NAME (src));
/* If our converter is already linked, we have nothing to do here */
if (gst_pad_is_linked (sink_pad)) {
g_print ("We are already linked. Ignoring.\n");
goto exit;
}
/* Check the new pad's type */
/* NOTE(review): gst_pad_get_current_caps() can return NULL when caps are
 * not yet negotiated — confirm this cannot happen for these pads. */
new_pad_caps = gst_pad_get_current_caps (new_pad);
new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
new_pad_type = gst_structure_get_name (new_pad_struct);
if (!g_str_has_prefix (new_pad_type, "video/x-raw")) {
g_print ("It has type '%s' which is not raw video. Ignoring.\n",new_pad_type);
goto exit;
}
/* Attempt the link */
ret = gst_pad_link (new_pad, sink_pad);
if (GST_PAD_LINK_FAILED (ret)) {
g_print ("Type is '%s' but link failed.\n", new_pad_type);
} else {
g_print ("Link succeeded (type '%s').\n", new_pad_type);
}
exit:
/* Unreference the new pad's caps, if we got them */
if (new_pad_caps != NULL)
gst_caps_unref (new_pad_caps);
/* Unreference the sink pad */
gst_object_unref (sink_pad);
}
I don't get any error when running the code but when I try to read the media I get the following error: "Operation not supported: Invalid socket ID"
Thank you in advance for your help

Need an example to retrieve stream statistics of output stream in Gstreamer C code?

I'm new to C coding and I am writing a basic transcoding program for a project I am working on. I was wondering if anyone has a basic example which would allow me to capture the output statistics for example (actual bitrate for video and audio, framerate, resolution size, video h264 level, etc)
Please see below code:
#include <gst/gst.h>
#include <glib.h>
#include <stdio.h>
/* Periodic GLib timeout callback: queries the pipeline's play position and
 * prints it on a single, continuously overwritten line ('\r'). */
static gboolean
cb_print_position (GstElement *pipeline)
{
gint64 current = 0;
gboolean have_pos = gst_element_query_position (pipeline, GST_FORMAT_TIME, &current);
if (have_pos)
g_print ("Time: %" GST_TIME_FORMAT "\r", GST_TIME_ARGS (current));
/* Returning TRUE keeps the timeout source scheduled. */
return TRUE;
}
/* Bus watch: quits the main loop (passed as user data) on EOS or error;
 * everything else is ignored. Returns TRUE to keep the watch alive. */
static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
GMainLoop *loop = (GMainLoop *) data;
GstMessageType type = GST_MESSAGE_TYPE (msg);

if (type == GST_MESSAGE_EOS) {
g_print ("End of stream\n");
g_main_loop_quit (loop);
} else if (type == GST_MESSAGE_ERROR) {
gchar *dbg = NULL;
GError *err = NULL;
/* Extract and report the error; the debug string is not printed. */
gst_message_parse_error (msg, &err, &dbg);
g_free (dbg);
g_printerr ("Error: %s\n", err->message);
g_error_free (err);
g_main_loop_quit (loop);
}
return TRUE;
}
int main (int argc, char *argv[])
{
GMainLoop *loop;
GstElement *pipeline, *videotestsrcm, *x264encm, *rtmpsinkm, *flvmuxm;
GstBus *bus;
guint bus_watch_id;
/* Initialisation */
gst_init (&argc, &argv);
const gchar*nano_str;guint major, minor, micro, nano;
gst_init (&argc, &argv);
gst_version (&major, &minor, &micro, &nano);
if (nano == 1)
nano_str = "(CVS)";
else if (nano == 2)
nano_str = "(Prerelease)";
else
nano_str = "";
printf ("This program is linked against GStreamer %d.%d.%d%s\n",major, minor, micro, nano_str);
loop = g_main_loop_new (NULL, FALSE);
/* Create gstreamer elements */
pipeline = gst_pipeline_new ("videotest-pipeline");
videotestsrcm = gst_element_factory_make ("videotestsrc", "testsource");
x264encm = gst_element_factory_make ("x264enc", "videoencoder");
rtmpsinkm = gst_element_factory_make ("rtmpsink", "video2sink");
flvmuxm = gst_element_factory_make ("flvmux", "muxer");
if (!pipeline || !videotestsrcm || !x264encm || !rtmpsinkm || !flvmuxm) {
g_printerr ("One element could not be created. Exiting.\n");
return -1;
}
g_object_set (G_OBJECT (rtmpsinkm), "location" , argv[1] , NULL);
/* Set up the pipeline */
/* we add a message handler */
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
/* we add all elements into the pipeline */
gst_bin_add_many (GST_BIN (pipeline),
videotestsrcm, x264encm, rtmpsinkm, flvmuxm, NULL);
/* we link the elements together */
/* videotestsrcm -> autovideosinkm */
gst_element_link_many (videotestsrcm, x264encm, flvmuxm, rtmpsinkm, NULL);
/* Set the pipeline to "playing" state*/
g_print ("Now set pipeline in state playing...\n");
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* run pipeline */
g_timeout_add (200, (GSourceFunc) cb_print_position, pipeline);
/* Iterate */
g_print ("Running...\n");
g_main_loop_run (loop);
/* Out of the main loop, clean up nicely */
g_print ("Returned, stopping playback\n");
gst_element_set_state (pipeline, GST_STATE_NULL);
g_print ("Deleting pipeline\n");
gst_object_unref (GST_OBJECT (pipeline));
g_source_remove (bus_watch_id);
g_main_loop_unref (loop);
return 0;
}

how to use alsa snd-aloop with gstreamer?

The general goal is that I want to play an audio track on my RPi with aplay ("aplay example.mp3") and have the output audio looped back into a gstreamer program. This program then does a spectrum analysis.
I got the spectrum analysis already working on a static file with this code as source:
data.source = gst_element_factory_make ("uridecodebin", "source");
g_object_set (data.source, "uri", "file:///home/pi/example.mp3", NULL);
Of course I want to use the overall output from my RPi as a source for the program, but I don't know how. I know I need to loop back the audio from the output to the input, and I found that snd-aloop looks promising. The problem is that I still don't know how to use it. I tried to do:
data.source = gst_element_factory_make ("alsasrc", "source");
g_object_set(data.source, "device", XXX ,NULL);
where XXX =
"alsa_output.platform-snd_aloop.0.analog-stereo.monitor"
"hw:1"
"hw:0"
Error -> Trying to dispose element sink, but it is in READY instead of the NULL state. You need to explicitly set Elements to the NULL state before dropping the final reference [...]
Bonus question: Is it possible to pipe audio into a gstreamer program? something like: "aplay example.mp3 > gstreamerCprogram".
Here is the code:
#include <string.h> /* strcmp() used in message_handler() */
#include <gst/gst.h>
#define AUDIOFREQ 32000
/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
GstElement *pipeline; /* top-level pipeline */
GstElement *source; /* alsasrc (or uridecodebin in the commented variant) */
GstElement *convert; /* audioconvert feeding the spectrum element */
GstElement *sink; /* autoaudiosink */
} CustomData;
/* Handler for the pad-added signal */
static void pad_added_handler (GstElement *src, GstPad *pad, CustomData *data);
/*
 * Bus watch: logs EOS and renders a crude text "spectrum" for each element
 * message named "spectrum".
 *
 * Fix: a GstBusFunc must return a gboolean; the original fell off the end
 * of the function (undefined behavior when the return value is read). It
 * now returns TRUE so the watch stays installed. The unused `magnitudes`
 * variable was removed.
 */
static gboolean message_handler (GstBus *bus, GstMessage *message, gpointer data){
if(message->type == GST_MESSAGE_EOS){
g_printerr("EOS\n");
}
if(message->type == GST_MESSAGE_ELEMENT){
const GstStructure *s = gst_message_get_structure (message);
const gchar *name = gst_structure_get_name(s);
if(strcmp(name, "spectrum") == 0){
gdouble freq;
int i = 0;
/* NOTE(review): this maps band i to (rate/2)*i + rate/4/bands, which
 * looks wrong for bands > 1; the usual per-band center frequency is
 * (rate/2/bands)*i + rate/4/bands. Left unchanged — confirm against
 * the spectrum element documentation. */
for(i = 0; i < 20; ++i){
freq = (gdouble)((32000/2) * i + 32000 / 4 / 20);
if(freq > 10000){
g_printerr("%f\n",freq);
}else{
g_printerr("|");
}
}
}
}
return TRUE;
}
/*
 * Builds alsasrc ! audioconvert ! spectrum ! autoaudiosink and prints a
 * crude spectrum from message_handler().
 *
 * NOTE(review): `caps` is created and NULL-checked but never applied to a
 * link (e.g. via gst_element_link_filtered), so the AUDIOFREQ restriction
 * has no effect — confirm whether that was intended.
 * NOTE(review): the "device" value looks like a PulseAudio source name;
 * alsasrc expects an ALSA device string such as "hw:1,1" — likely related
 * to the reported error.
 */
int main(int argc, char *argv[]) {
CustomData data;
GstCaps *caps;
GstElement *spectrum;
GstBus *bus;
GstMessage *msg; /* unused */
GstStateChangeReturn ret;
gboolean terminate = FALSE; /* unused */
/* Initialize GStreamer */
gst_init (&argc, &argv);
//____________________________HERE IS THE PROBLEM________________________
//data.source = gst_element_factory_make ("uridecodebin", "source");
//g_object_set (data.source, "uri", "file:///home/pi/example.mp3", NULL);
data.source = gst_element_factory_make ("alsasrc", "source");
g_object_set(data.source, "device", "alsa_output.platform-snd_aloop.0.analog-stereo.monitor",NULL);
//____________________________HERE ENDS THE PROBLEM________________________
data.convert = gst_element_factory_make ("audioconvert", "convert");
data.sink = gst_element_factory_make ("autoaudiosink", "sink");
spectrum = gst_element_factory_make ("spectrum", "spectrum");
caps = gst_caps_new_simple ("audio/x-raw", "rate",G_TYPE_INT, AUDIOFREQ, NULL);
//SET SOME VARIABLES ON SPECTRUM
g_object_set (G_OBJECT (spectrum), "bands", 20, "post-messages", TRUE, "message-phase", TRUE, NULL);
/* Create the empty pipeline */
data.pipeline = gst_pipeline_new ("test-pipeline");
if (!data.pipeline || !data.source || !data.convert || !data.sink || !caps || !spectrum) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Build the pipeline. Note that we are NOT linking the source at this
 * point. We will do it later. */
gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.convert , spectrum,data.sink, NULL);
if (!gst_element_link_many (data.convert, spectrum, data.sink, NULL)) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (data.pipeline);
return -1;
}
/* Connect to the pad-added signal */
/* NOTE(review): alsasrc has an always src pad; "pad-added" only fires for
 * elements with dynamic pads (uridecodebin), so with alsasrc the source
 * is never linked — confirm intent. */
g_signal_connect (data.source, "pad-added", G_CALLBACK (pad_added_handler), &data);
/* Start playing */
ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (data.pipeline);
return -1;
}
GMainLoop *loop;
/* Listen to the bus */
bus = gst_element_get_bus (data.pipeline);
gst_bus_add_watch(bus, message_handler, NULL);
loop = g_main_loop_new (NULL,FALSE);
g_main_loop_run(loop);
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (data.pipeline, GST_STATE_NULL);
gst_object_unref (data.pipeline);
return 0;
}
/* pad-added callback: links a newly exposed source pad to audioconvert's
 * sink pad, accepting raw-audio pads only. Behavior is unchanged from the
 * original; the control flow is restructured without gotos. */
static void pad_added_handler (GstElement *src, GstPad *new_pad, CustomData *data) {
GstPad *conv_sink = gst_element_get_static_pad (data->convert, "sink");
GstCaps *pad_caps = NULL;

g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad), GST_ELEMENT_NAME (src));

if (gst_pad_is_linked (conv_sink)) {
/* Converter already has an upstream peer — nothing to do. */
g_print (" We are already linked. Ignoring.\n");
} else {
const gchar *media_type;
/* Inspect what the new pad can produce. */
pad_caps = gst_pad_query_caps (new_pad, NULL);
media_type = gst_structure_get_name (gst_caps_get_structure (pad_caps, 0));
if (!g_str_has_prefix (media_type, "audio/x-raw")) {
g_print (" It has type '%s' which is not raw audio. Ignoring.\n", media_type);
} else if (GST_PAD_LINK_FAILED (gst_pad_link (new_pad, conv_sink))) {
g_print (" Type is '%s' but link failed.\n", media_type);
} else {
g_print (" Link succeeded (type '%s').\n", media_type);
}
}

/* Release the caps (if queried) and the converter's sink pad. */
if (pad_caps != NULL)
gst_caps_unref (pad_caps);
gst_object_unref (conv_sink);
}

Gstreamer pipeline works with gst-launch but not in code. Reproducing a mjpeg stream from a IP camera

I want to reproduce a mjpeg stream from a intercom (but it's equivalent to a IP camera). Using gst-launch in the console works fine:
gst-launch-1.0 souphttpsrc location="http://192.168.1.191/api/camera/snapshot?width=640&height=480&fps=10" timeout=5 ! multipartdemux ! jpegdec ! videoconvert ! ximagesink
However, when I try to build an application to do this, it doesn't work.
My code:
#include <gst/gst.h>
#include <glib.h>
/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
GstElement *pipeline; /* top-level pipeline */
GstElement *source; /* souphttpsrc fetching the MJPEG stream */
GstElement *v_demux; /* multipartdemux; video pad appears dynamically */
GstElement *v_decoder; /* jpegdec */
GstElement *v_convert; /* videoconvert */
GstElement *v_sink; /* ximagesink */
} CustomData;
/* Handler for the pad-added signal */
static void pad_added_handler (GstElement *src, GstPad *pad, CustomData *data);
/** Main function */
int main(int argc, char *argv[]) {
CustomData data;
GstBus *bus;
GstMessage *msg;
GstStateChangeReturn ret;
gboolean terminate = FALSE;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements
*
* souphttpsrc -> multipartdemux (~>) jpegdec -> videoconvert -> ximagesink
*
* ~> Sometimes pad
*
* */
data.source = gst_element_factory_make ("souphttpsrc", "video_source");
data.v_demux = gst_element_factory_make ("multipartdemux", "video_demux");
data.v_decoder = gst_element_factory_make ("jpegdec", "video_decoder");
data.v_convert = gst_element_factory_make ("videoconvert", "video_convert");
data.v_sink = gst_element_factory_make ("ximagesink", "video_sink");
/* Create the empty pipeline */
data.pipeline = gst_pipeline_new ("new-pipeline");
if (!data.pipeline || !data.source ||
!data.v_demux || !data.v_decoder || !data.v_convert || !data.v_sink ) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Configure elements */
g_object_set(G_OBJECT(data.source), "location", argv[1], NULL);
g_object_set(G_OBJECT(data.source), "timeout", 5, NULL);
/* Link all elements that can be automatically linked because they have "Always" pads */
gst_bin_add_many (GST_BIN (data.pipeline), data.source,
data.v_demux, data.v_decoder, data.v_convert, data.v_sink,
NULL);
if (gst_element_link_many (data.source, data.v_demux, NULL) != TRUE ||
gst_element_link_many (data.v_decoder, data.v_convert, data.v_sink, NULL) != TRUE ) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (data.pipeline);
return -1;
}
/* Connect to the pad-added signal */
g_signal_connect (data.v_demux, "pad-added", G_CALLBACK (pad_added_handler), &data);
/* Start playing */
ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (data.pipeline);
return -1;
}
/* Listen to the bus */
bus = gst_element_get_bus (data.pipeline);
do {
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
terminate = TRUE;
break;
case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n");
terminate = TRUE;
break;
case GST_MESSAGE_STATE_CHANGED:
/* We are only interested in state-changed messages from the pipeline */
if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data.pipeline)) {
GstState old_state, new_state, pending_state;
gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
g_print ("Pipeline state changed from %s to %s:\n",
gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));
}
break;
default:
/* We should not reach here */
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (msg);
}
} while (!terminate);
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (data.pipeline, GST_STATE_NULL);
gst_object_unref (data.pipeline);
return 0;
}
/* This function will be called by the pad-added signal */
/*
 * Links a new multipartdemux pad to the jpeg decoder when the pad carries
 * a "video/..." media type.
 *
 * Fix: both early-return paths ("not video" and "already linked") leaked
 * `new_pad_caps`; the caps are now released before returning.
 */
static void pad_added_handler (GstElement *src, GstPad *new_pad, CustomData *data) {
GstPad *sink_pad = NULL;
GstPadLinkReturn ret;
GstCaps *new_pad_caps = NULL;
GstStructure *new_pad_struct = NULL;
const gchar *new_pad_type = NULL;
g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad), GST_ELEMENT_NAME (src));
/* Get information of the new pad's type */
new_pad_caps = gst_pad_get_current_caps (new_pad);
new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
new_pad_type = gst_structure_get_name (new_pad_struct);
/* Get pad from the correspondent converter */
if (g_str_has_prefix (new_pad_type, "video")) {
sink_pad = gst_element_get_static_pad (data->v_decoder, "sink");
} else {
g_print (" It has type '%s' -> So exit\n", new_pad_type);
gst_caps_unref (new_pad_caps); /* fix: was leaked here */
return;
}
/* If our converter is already linked, we have nothing to do here */
if (gst_pad_is_linked (sink_pad)) {
g_print (" We are already linked. Ignoring.\n");
gst_caps_unref (new_pad_caps); /* fix: was leaked here */
gst_object_unref (sink_pad);
return;
}
ret = gst_pad_link (new_pad, sink_pad);
if (GST_PAD_LINK_FAILED (ret)) {
g_print (" Type is '%s' but link failed.\n", new_pad_type);
} else {
g_print (" Link succeeded (type '%s').\n", new_pad_type);
}
/* Unreference the new pad's caps, if we got them */
if (new_pad_caps != NULL) {
gst_caps_unref (new_pad_caps);
}
/* Unreference the sink pad */
if (sink_pad != NULL) {
gst_object_unref (sink_pad);
}
}
The output when I run the program:
Pipeline state changed from NULL to READY:
Pipeline state changed from READY to PAUSED:
Error received from element video_demux: Could not demultiplex stream.
Debugging information: multipartdemux.c(475): multipart_parse_header (): /GstPipeline:new-pipeline/GstMultipartDemux:video_demux:
Boundary not found in the multipart header
Any idea what am I missing?
Thanks in advance.
I have found sometimes that adding queues helps, maybe one before the jpegdec? Also maybe try a jpegparse before the jpegdec.

Snapshot with Gstreamer without EOS

I'm trying to take several snapshots from a source using gstreamer. With the following code, I succeed to take 9 files but with an EOS from the source (that is actually normal, it's cause by the num-buffers argument):
#include <gst/gst.h>
/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
GstElement *pipeline; /* top-level pipeline */
GstElement *source; /* videotestsrc limited by num-buffers */
GstElement *convert; /* ffmpegcolorspace (0.10-era element) */
GstElement *sink; /* multifilesink writing frame%05d.pgm */
GstElement *encode; /* ffenc_pgm (0.10-era element) */
} CustomData;
/*
 * videotestsrc ! ffmpegcolorspace ! ffenc_pgm ! multifilesink — writes 9
 * PGM snapshots (num-buffers=9 ends the stream with EOS).
 *
 * Fix: data.encode was missing from the element NULL-check, so a failed
 * "ffenc_pgm" factory lookup crashed later instead of erroring out here.
 */
int main(int argc, char *argv[]) {
CustomData data;
GstBus *bus;
GstMessage *msg;
GstStateChangeReturn ret;
gboolean terminate = FALSE;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements */
data.source = gst_element_factory_make ("videotestsrc", "source");
data.convert = gst_element_factory_make ("ffmpegcolorspace", "convert");
data.encode = gst_element_factory_make ("ffenc_pgm", "encode");
data.sink = gst_element_factory_make ("multifilesink", "sink");
/* Create the empty pipeline */
data.pipeline = gst_pipeline_new ("test-pipeline");
if (!data.pipeline || !data.source || !data.convert || !data.encode || !data.sink) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Build the pipeline. Note that we are NOT linking the source at this
 * point. We will do it later. */
gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.convert , data.encode, data.sink, NULL);
if (!gst_element_link_many (data.source, data.convert, data.encode, data.sink, NULL)) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (data.pipeline);
return -1;
}
/* Modify the source's properties */
g_object_set (data.source, "pattern", 0, NULL);
g_object_set (data.source, "num-buffers", 9, NULL);
g_object_set(data.sink, "location", "frame%05d.pgm", NULL);
/* Start playing */
ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (data.pipeline);
return -1;
}
/* Wait until error or EOS */
bus = gst_element_get_bus (data.pipeline);
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
break;
case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n");
break;
default:
/* We should not reach here because we only asked for ERRORs and EOS */
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (msg);
}
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (data.pipeline, GST_STATE_NULL);
gst_object_unref (data.pipeline);
return 0;
}
But my problem is that I want to continue the live stream after those 9 snapshots. I looked into the tee and queue capabilities, but I wasn't able to get anything working. I think I have to build a dynamic pipeline with a multifilesink element that I pause and resume, but how do I tell it to create only 9 files? (max-files=9 doesn't work, because the generated files are overwritten.)
Thanks
Sure, you need to add probe to count buffers and remove some elements once you don't need them.
I added few fields to your struct:
int count;
GstPad *blockpad;
GstElement *fakesink;
I created one more sink to replace end of pipeline once we saved 9 snapshots:
data.fakesink = gst_element_factory_make ("fakesink", "fakesink");
I added probe to srcpad of data.convert:
data.count = 0;
data.blockpad = gst_element_get_static_pad (data.convert, "src");
gst_pad_add_probe (data.blockpad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM | GST_PAD_PROBE_TYPE_BUFFER,
pad_probe_cb, &data, NULL);
I used GStreamer 1.x so I replaced ffenc_pgm element with avenc_pgm and ffmpegcolorspace element with identity:
#include <stdio.h>
#include <gst/gst.h>
/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
int count; /* buffers seen by the pad probe */
GstPad *blockpad; /* convert's src pad, where the probe is installed */
GstElement *pipeline; /* top-level pipeline */
GstElement *source; /* videotestsrc */
GstElement *convert; /* identity passthrough (1.x port of ffmpegcolorspace) */
GstElement *sink; /* multifilesink, removed after 9 snapshots */
GstElement *fakesink; /* replacement sink installed by the probe */
GstElement *encode; /* avenc_pgm, removed after 9 snapshots */
} CustomData;
/*
 * Buffer probe on convert's src pad: counts buffers; once 9 have passed it
 * dynamically replaces the encode+multifilesink branch with a fakesink so
 * the pipeline keeps running without writing further files, then removes
 * itself.
 */
static GstPadProbeReturn
pad_probe_cb (GstPad * pad, GstPadProbeInfo * info, gpointer user_data) {
CustomData *data = user_data;
data->count++;
printf("%d\n", data->count);
if (data->count > 9)
{
/* Shut down and detach the file-writing branch; gst_bin_remove drops
 * the bin's reference to each element. */
gst_element_set_state (data->encode, GST_STATE_NULL);
gst_bin_remove (GST_BIN (data->pipeline), data->encode);
gst_element_set_state (data->sink, GST_STATE_NULL);
gst_bin_remove (GST_BIN (data->pipeline), data->sink);
/* Attach the pre-created fakesink and bring it up to PLAYING. */
gst_bin_add (GST_BIN (data->pipeline), data->fakesink);
gst_element_link (data->convert, data->fakesink);
gst_element_set_state (data->fakesink, GST_STATE_PLAYING);
/* NOTE(review): explicit removal plus returning GST_PAD_PROBE_REMOVE
 * is redundant — either alone removes the probe. */
gst_pad_remove_probe (pad, GST_PAD_PROBE_INFO_ID (info));
return GST_PAD_PROBE_REMOVE;
}
else
return GST_PAD_PROBE_PASS;
}
/*
 * videotestsrc ! identity ! avenc_pgm ! multifilesink with a pad probe
 * (pad_probe_cb) that swaps in a fakesink after 9 snapshots, so playback
 * continues past the snapshots (source emits 20 buffers in total).
 *
 * Fix: data.encode and data.fakesink were missing from the element
 * NULL-check, so a failed factory lookup crashed later instead of
 * erroring out here.
 */
int main(int argc, char *argv[]) {
CustomData data;
GstBus *bus;
GstMessage *msg;
GstStateChangeReturn ret;
gboolean terminate = FALSE;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements */
data.source = gst_element_factory_make ("videotestsrc", "source");
data.convert = gst_element_factory_make ("identity", "convert");
data.encode = gst_element_factory_make ("avenc_pgm", "encode");
data.sink = gst_element_factory_make ("multifilesink", "sink");
data.fakesink = gst_element_factory_make ("fakesink", "fakesink");
/* Create the empty pipeline */
data.pipeline = gst_pipeline_new ("test-pipeline");
if (!data.pipeline || !data.source || !data.convert || !data.encode || !data.sink || !data.fakesink) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Build the pipeline. Note that we are NOT linking the source at this
 * point. We will do it later. */
gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.convert , data.encode, data.sink, NULL);
if (!gst_element_link_many (data.source, data.convert, data.encode, data.sink, NULL)) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (data.pipeline);
return -1;
}
/* Modify the source's properties */
g_object_set (data.source, "pattern", 0, NULL);
g_object_set (data.source, "num-buffers", 20, NULL);
g_object_set (data.sink, "location", "frame%05d.pgm", NULL);
/* Install the counting/blocking probe on convert's src pad. */
data.count = 0;
data.blockpad = gst_element_get_static_pad (data.convert, "src");
gst_pad_add_probe (data.blockpad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM | GST_PAD_PROBE_TYPE_BUFFER,
pad_probe_cb, &data, NULL);
/* Start playing */
ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (data.pipeline);
return -1;
}
/* Wait until error or EOS */
bus = gst_element_get_bus (data.pipeline);
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
break;
case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n");
break;
default:
/* We should not reach here because we only asked for ERRORs and EOS */
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (msg);
}
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (data.pipeline, GST_STATE_NULL);
gst_object_unref (data.pipeline);
return 0;
}

Resources