GStreamer 1.2.4.1 Windows build: GStreamer Editing Services examples on Windows with Visual Studio 2012 - C

I have downloaded and installed GStreamer from http://gstreamer.freedesktop.org/data/pkg/windows/1.2.4.1/. Then I set up a Visual Studio 2012 C++ project and added all the *.props files. I want to try out the GStreamer Editing Services (GES) example project test4.c:
#include <ges/ges.h>
#include <gst/pbutils/encoding-profile.h>

GstEncodingProfile *make_encoding_profile (gchar * audio, gchar * container);

/* This example will take a series of files and create an audio-only timeline
 * containing the first second of each file and render it to the output uri
 * using ogg/vorbis */

/* make_encoding_profile
 * simple method creating an encoding profile. This is here in
 * order not to clutter the main function. */
GstEncodingProfile *
make_encoding_profile (gchar * audio, gchar * container)
{
  GstEncodingContainerProfile *profile;
  GstEncodingProfile *stream;
  GstCaps *caps;

  caps = gst_caps_from_string (container);
  profile =
      gst_encoding_container_profile_new ((gchar *) "ges-test4", NULL, caps,
      NULL);
  gst_caps_unref (caps);

  caps = gst_caps_from_string (audio);
  stream = (GstEncodingProfile *)
      gst_encoding_audio_profile_new (caps, NULL, NULL, 0);
  gst_encoding_container_profile_add_profile (profile, stream);
  gst_caps_unref (caps);

  return (GstEncodingProfile *) profile;
}

int
main (int argc, gchar ** argv)
{
  GESPipeline *pipeline;
  GESTimeline *timeline;
  GESTrack *tracka;
  GESLayer *layer;
  GMainLoop *mainloop;
  GstEncodingProfile *profile;
  gchar *container = (gchar *) "application/ogg";
  gchar *audio = (gchar *) "audio/x-vorbis";
  gchar *output_uri;
  guint i;
  GError *err = NULL;
  GOptionEntry options[] = {
    {"format", 'f', 0, G_OPTION_ARG_STRING, &container,
        "Container format", "<GstCaps>"},
    {"aformat", 'a', 0, G_OPTION_ARG_STRING, &audio,
        "Audio format", "<GstCaps>"},
    {NULL}
  };
  GOptionContext *ctx;

  ctx = g_option_context_new ("- renders a sequence of audio files.");
  g_option_context_add_main_entries (ctx, options, NULL);
  g_option_context_add_group (ctx, gst_init_get_option_group ());
  if (!g_option_context_parse (ctx, &argc, &argv, &err)) {
    g_printerr ("Error initializing: %s\n", err->message);
    g_option_context_free (ctx);
    return -1;
  }

  if (argc < 3) {
    g_print ("Usage: %s <output uri> <list of audio files>\n", argv[0]);
    return -1;
  }

  /* Initialize GStreamer (this will parse environment variables and command
   * line arguments). */
  gst_init (&argc, &argv);

  /* Initialize the GStreamer Editing Services */
  ges_init ();

  /* Setup of an audio timeline */
  /* This is our main GESTimeline */
  timeline = ges_timeline_new ();
  tracka = GES_TRACK (ges_audio_track_new ());

  /* We are only going to be doing one layer of clips */
  layer = ges_layer_new ();

  /* Add the tracks and the layer to the timeline */
  if (!ges_timeline_add_layer (timeline, layer))
    return -1;
  if (!ges_timeline_add_track (timeline, tracka))
    return -1;

  /* Here we've finished initializing our timeline, we're
   * ready to start using it... by solely working with the layer! */
  for (i = 2; i < argc; i++) {
    gchar *uri = gst_filename_to_uri (argv[i], NULL);
    GESUriClip *src = ges_uri_clip_new (uri);
    g_assert (src);
    g_free (uri);

    g_object_set (src, "start", ges_layer_get_duration (layer),
        "duration", GST_SECOND, NULL);
    /* Each clip is positioned explicitly via its "start" property; here we
     * append each one at the current end of the layer */
    ges_layer_add_clip (layer, (GESClip *) src);
  }

  /* In order to render our timeline, let's grab a convenience pipeline to put
   * our timeline in. */
  pipeline = ges_pipeline_new ();

  /* Add the timeline to that pipeline */
  if (!ges_pipeline_set_timeline (pipeline, timeline))
    return -1;

  /* RENDER SETTINGS! */
  /* We set our output URI and rendering settings on the pipeline */
  if (gst_uri_is_valid (argv[1])) {
    output_uri = g_strdup (argv[1]);
  } else {
    output_uri = gst_filename_to_uri (argv[1], NULL);
  }
  profile = make_encoding_profile (audio, container);
  if (!ges_pipeline_set_render_settings (pipeline, output_uri, profile))
    return -1;

  /* We want the pipeline to render (without any preview) */
  if (!ges_pipeline_set_mode (pipeline, GES_PIPELINE_MODE_SMART_RENDER))
    return -1;

  /* The following is standard usage of a GStreamer pipeline (note how you
   * haven't had to care about GStreamer so far?).
   *
   * We set the pipeline to playing ... */
  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);

  /* ... and we start a GMainLoop. GES **REQUIRES** a GMainLoop to be running
   * in order to function properly! */
  mainloop = g_main_loop_new (NULL, FALSE);

  /* Simple code to have the mainloop shut down after (argc - 1) seconds */
  /* FIXME : We should wait for EOS! */
  g_timeout_add_seconds (argc - 1, (GSourceFunc) g_main_loop_quit, mainloop);
  g_main_loop_run (mainloop);

  return 0;
}
The program runs from the beginning to the end without any errors, but I can't get the music files playing. I think the problem lurks in this piece of code:
if (gst_uri_is_valid (argv[1])) {
  output_uri = g_strdup (argv[1]);
} else {
  output_uri = gst_filename_to_uri (argv[1], NULL);
}
I don't really know what to type as the first command line argument (an output URI is expected). When I provide a directory and launch the program, it waits for a few seconds and then prints "Press any key to continue...", but I can't hear any music playing, nor do I find any files in the provided output directory. What's more, I was unable to hear any sound when trying out the test1.c, test2.c and test3.c examples either. Maybe there is a special way to provide the arguments? My arguments looked like this: D:/output D:/mp3/1.mp3 D:/mp3/2.mp3.
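For reference, the first argument is expected to be a path or URI for the rendered output file, not a directory; e.g. D:/output.ogg, which gst_filename_to_uri() turns into file:///D:/output.ogg. Note also that the example quits after a fixed timeout whether or not the render has finished. Below is a minimal sketch of quitting on EOS or error instead (bus_message_cb is my own name; this is untested against the 1.2.4.1 Windows build):
/* Hypothetical sketch: quit the main loop when rendering finishes (EOS)
 * or fails (ERROR), instead of after a fixed number of seconds. */
static gboolean
bus_message_cb (GstBus * bus, GstMessage * message, gpointer user_data)
{
  GMainLoop *mainloop = (GMainLoop *) user_data;

  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_EOS:
    case GST_MESSAGE_ERROR:
      g_main_loop_quit (mainloop);
      break;
    default:
      break;
  }
  return TRUE;
}

/* in main (), before g_main_loop_run (mainloop): */
GstBus *bus = gst_element_get_bus (GST_ELEMENT (pipeline));
gst_bus_add_watch (bus, bus_message_cb, mainloop);
gst_object_unref (bus);
If no output file appears even then, check the bus for ERROR messages; the Ogg/Vorbis encoder plugins must be present in the Windows plugin set for this profile to work.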

Related

GStreamer appsink to RTSP server with appsrc as source: large latency

I have a pipeline with an appsink which pushes samples to an appsrc, which acts as the source for a pipeline created by an RTSP server. It works: I can connect to the RTSP server and see the streamed video. The problem is latency. For some reason a lot of buffers are queued in the appsrc, and the viewed stream has a latency of more than two seconds.
I tried to find the source of the latency, and it looks like data only start being read from the appsrc source pad some time after the pipeline is started. The delay between the point the pipeline is started and the point data start to be read out from the appsrc source pad then turns into its latency.
I found this by reading out how many bytes are queued in the appsrc each time I push a buffer to it. This value rises continuously for some time. Once the read-out of data starts, the amount of bytes stored in the appsrc queue stays approximately the same for the rest of the time I stream the video.
Here is my test application which I'm using to test the correct functionality of this design.
#include <stdio.h>
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <gst/app/gstappsink.h>
#include <time.h>
#include <gst/rtsp-server/rtsp-server.h>
GMainLoop *loop;
GstElement *appsink;
GstElement *appsrc;
GstElement *appsink_pipeline;
/* Functions below print the Capabilities in a human-friendly format */
static gboolean print_field (GQuark field, const GValue * value, gpointer pfx) {
gchar *str = gst_value_serialize (value);
g_print ("%s %15s: %s\n", (gchar *) pfx, g_quark_to_string (field), str);
g_free (str);
return TRUE;
}
static void print_caps (const GstCaps * caps, const gchar * pfx) {
guint i;
g_return_if_fail (caps != NULL);
if (gst_caps_is_any (caps)) {
g_print ("%sANY\n", pfx);
return;
}
if (gst_caps_is_empty (caps)) {
g_print ("%sEMPTY\n", pfx);
return;
}
for (i = 0; i < gst_caps_get_size (caps); i++) {
GstStructure *structure = gst_caps_get_structure (caps, i);
g_print ("%s%s\n", pfx, gst_structure_get_name (structure));
gst_structure_foreach (structure, print_field, (gpointer) pfx);
}
}
/* called when the appsink notifies us that there is a new buffer ready for
* processing */
static GstFlowReturn
on_new_sample_from_sink (GstElement * elt, void * data)
{
GstSample *sample;
GstFlowReturn ret = GST_FLOW_OK;
guint64 bytes;
/* get the sample from appsink */
sample = gst_app_sink_pull_sample (GST_APP_SINK (elt));
if(appsrc)
{
bytes = gst_app_src_get_current_level_bytes(GST_APP_SRC(appsrc));
g_print("buffered bytes before push %lu\n", bytes);
ret = gst_app_src_push_sample(GST_APP_SRC (appsrc), sample);
// bytes = gst_app_src_get_current_level_bytes(GST_APP_SRC(appsrc));
// if(ret == GST_FLOW_OK)
// g_print("pushed ok - buffered bytes after push %lu\n", bytes);
}
gst_sample_unref (sample);
return ret;
}
/* called when we get a GstMessage from the source pipeline; when we get EOS,
* we notify the appsrc of it. */
static gboolean
on_source_message (GstBus * bus, GstMessage * message, void * data)
{
gint percent;
g_print ("%s\n", __func__);
switch (GST_MESSAGE_TYPE (message)) {
case GST_MESSAGE_EOS:
g_print ("The source got dry\n");
gst_app_src_end_of_stream (GST_APP_SRC (appsrc));
break;
case GST_MESSAGE_ERROR:
g_print ("Received error\n");
g_main_loop_quit (loop);
break;
case GST_MESSAGE_BUFFERING:
gst_message_parse_buffering (message, &percent);
g_print ("Buffering = %d\n", percent);
break;
default:
break;
}
return TRUE;
}
/* the "need-data" signal handler returns void, not GstFlowReturn */
static void need_data (GstElement * appsrc_loc,
guint length,
gpointer udata)
{
g_print("Need data\n");
}
/* this timeout is periodically run to clean up the expired sessions from the
* pool. This needs to be run explicitly currently but might be done
* automatically as part of the mainloop. */
static gboolean
timeout (GstRTSPServer * server)
{
GstRTSPSessionPool *pool;
pool = gst_rtsp_server_get_session_pool (server);
gst_rtsp_session_pool_cleanup (pool);
g_object_unref (pool);
return TRUE;
}
void clientConnected(GstRTSPServer* server, GstRTSPClient* client, gpointer user)
{
g_print("%s\n", __func__);
}
static void media_state_cb(GstRTSPMedia *media, GstState state)
{
g_print("media state = %d\n", state);
}
static void
media_construct (GstRTSPMediaFactory * factory, GstRTSPMedia * media,
gpointer user_data)
{
GstElement *element;
g_print("%s\n", __func__);
/* get the element used for providing the streams of the media */
element = gst_rtsp_media_get_element (media);
/* get our appsrc, we named it 'appsrc' with the name property */
appsrc = gst_bin_get_by_name_recurse_up (GST_BIN (element), "appsrc");
g_signal_connect (appsrc, "need-data",
G_CALLBACK (need_data), NULL);
g_signal_connect (media, "new-state",
G_CALLBACK (media_state_cb), NULL);
gst_object_unref (element);
}
static void
media_configure (GstRTSPMediaFactory * factory, GstRTSPMedia * media,
gpointer user_data)
{
GstPad *pad;
GstCaps *caps;
gchar *caps_str;
GstElement *element;
g_print("%s\n", __func__);
/* get the element used for providing the streams of the media */
element = gst_rtsp_media_get_element (media);
/* get our appsrc, we named it 'mysrc' with the name property */
appsrc = gst_bin_get_by_name_recurse_up (GST_BIN (element), "appsrc");
pad = gst_element_get_static_pad (appsink, "sink");
if(pad)
{
g_print("Got pad\n");
caps = gst_pad_get_current_caps (pad);
if(caps)
{
caps_str = gst_caps_to_string (caps);
g_print("Got caps %s\n", caps_str);
g_object_set (G_OBJECT (appsrc), "caps", caps, NULL);
gst_caps_unref(caps);
}
}
/* this instructs appsrc that we will be dealing with timed buffers */
gst_util_set_object_arg (G_OBJECT (appsrc), "format", "time");
gst_object_unref (element);
}
int main (int argc, char *argv[]){
GstBus *bus;
GstRTSPServer *server;
GstRTSPMountPoints *mounts;
GstRTSPMediaFactory *factory;
gchar src[] = "nvv4l2camerasrc device=/dev/video0 ! video/x-raw(memory:NVMM), width=1920, height=1080, format=UYVY, framerate=60/1 ! "
" queue max-size-buffers=3 leaky=downstream ! "
" nvvidconv name=conv ! video/x-raw(memory:NVMM), width=1280, height=720, format=NV12, framerate=60/1 ! "
" nvv4l2h264enc control-rate=1 bitrate=8000000 preset-level=1 profile=0 disable-cabac=1 maxperf-enable=1 name=encoder insert-sps-pps=1 insert-vui=1 idrinterval=30 ! "
" appsink name=appsink sync=false max-buffers=3";
gchar sink[] = "( appsrc name=appsrc format=3 stream-type=0 is-live=true blocksize=2097152 max-bytes=200000 ! "
" queue max-size-buffers=3 leaky=no ! "
" rtph264pay config-interval=1 name=pay0 )";
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
/* Create pipeline with appsink */
g_print("Creating pipeline with appsink\n");
appsink_pipeline = gst_parse_launch (src, NULL);
if (appsink_pipeline == NULL) {
g_print ("Bad source\n");
g_main_loop_unref (loop);
return -1;
}
/* to be notified of messages from this pipeline, mostly EOS */
bus = gst_element_get_bus (appsink_pipeline);
gst_bus_add_watch (bus, (GstBusFunc) on_source_message, appsink_pipeline);
gst_object_unref (bus);
/* Create push_buffer callback for appsink */
g_print("Creating push buffer callback\n");
appsink = gst_bin_get_by_name (GST_BIN (appsink_pipeline), "appsink");
g_object_set (G_OBJECT (appsink), "emit-signals", TRUE, "sync", FALSE, NULL);
g_signal_connect (appsink, "new-sample",
G_CALLBACK (on_new_sample_from_sink), NULL);
/* Create rtsp server with pipeline starting with appsrc */
g_print("Creating rtsp server\n");
/* create a server instance */
server = gst_rtsp_server_new ();
/* get the mount points for this server, every server has a default object
* that can be used to map uri mount points to media factories */
mounts = gst_rtsp_server_get_mount_points (server);
/* make a media factory for a test stream. The default media factory can use
* gst-launch syntax to create pipelines.
* any launch line works as long as it contains elements named pay%d. Each
* element with pay%d names will be a stream */
factory = gst_rtsp_media_factory_new ();
gst_rtsp_media_factory_set_launch (factory, sink);
gst_rtsp_media_factory_set_shared(factory, TRUE);
/* attach the test factory to the /test url */
gst_rtsp_mount_points_add_factory (mounts, "/test", factory);
/* don't need the ref to the mapper anymore */
g_object_unref (mounts);
/* attach the server to the default maincontext */
if (gst_rtsp_server_attach (server, NULL) == 0)
goto failed;
/* add a timeout for the session cleanup */
g_timeout_add_seconds (2, (GSourceFunc) timeout, server);
g_signal_connect (server, "client-connected",
G_CALLBACK (clientConnected), NULL);
/* Create media-constructed callback to get appsrc reference */
g_print("Creating media-constructed callback\n");
g_signal_connect (factory, "media-constructed", (GCallback) media_construct,
NULL);
g_signal_connect (factory, "media-configure", (GCallback) media_configure,
NULL);
/* Push buffers from appsink to appsrc */
/* start serving, this never stops */
g_print("Running main loop\n");
gst_element_set_state (appsink_pipeline, GST_STATE_PLAYING);
g_main_loop_run (loop);
gst_element_set_state (appsink_pipeline, GST_STATE_NULL);
return 0;
/* ERRORS */
failed:
{
g_print ("failed to attach the server\n");
return -1;
}
}
I will appreciate any ideas about what could cause this behavior and how to solve it.
Thanks a lot!
This latency problem may be due to many reasons, but most of the time it is because frames are not in sync and a lot of data sits in the queue.
To narrow down the real problem, test these cases (see also the sketch after this list):
Check the behavior with videotestsrc instead of the camera source.
Are you sure the queue after nvv4l2camerasrc is needed? What is the output if you skip the queue element?
You can also check with a lower-resolution input.
What happens if you use v4l2src instead of nvv4l2camerasrc, if your camera source is V4L2 compliant?
Thanks
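Beyond those checks, one pattern that often reduces this kind of queue-driven latency is letting the appsink drop stale buffers, so old frames never pile up before they reach the appsrc. A hedged sketch using standard appsink/appsrc properties (the values are illustrative, not tuned for this setup):
/* Keep only the newest sample in the appsink; when its internal queue is
 * full, old buffers are dropped instead of accumulating as latency. */
g_object_set (G_OBJECT (appsink),
    "emit-signals", TRUE,
    "sync", FALSE,
    "max-buffers", 1,   /* illustrative value */
    "drop", TRUE,
    NULL);

/* Likewise cap how much the appsrc may buffer internally. */
g_object_set (G_OBJECT (appsrc),
    "is-live", TRUE,
    "format", GST_FORMAT_TIME,
    "max-bytes", (guint64) 200000,  /* illustrative value */
    NULL);
Whether dropping is acceptable depends on whether every frame must reach the client; for live viewing it usually is.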

Undefined Reference to gdk_pixbuf_save & gdk_pixbuf_new_from_data

I have the following code; it is a modification of code found here, in conjunction with this tutorial here.
static void * snapshot_function(void *userdata){
CustomData *data = (CustomData *) userdata;
gint width, height;
GstSample *sample;
GError *error = NULL;
GdkPixbuf *pixbuf;
gint64 duration = -1, position; /* initialised in case the duration query fails */
GstStateChangeReturn ret;
gboolean res;
GstMapInfo map;
/* Build pipeline */
data->pipeline_snapshot = gst_parse_launch(pipeline_description_snapshot, &error);
if (error) {
gchar *message =
g_strdup_printf("Unable to build pipeline: %s", error->message);
g_clear_error(&error);
set_ui_message(message, data);
g_free(message);
return NULL;
}
if (error != NULL) {
g_print ("could not construct pipeline: %s\n", error->message);
g_error_free (error);
exit (-1);
}
/* get sink */
data->snapshot_sink = gst_bin_get_by_name (GST_BIN (data->pipeline_snapshot), "sink");
/* set to PAUSED to make the first frame arrive in the sink */
ret = gst_element_set_state (data->pipeline_snapshot, GST_STATE_PAUSED);
switch (ret) {
case GST_STATE_CHANGE_FAILURE:
g_print ("failed to play the file\n");
exit (-1);
case GST_STATE_CHANGE_NO_PREROLL:
/* for live sources, we need to set the pipeline to PLAYING before we can
* receive a buffer. We don't do that yet */
g_print ("live sources not supported yet\n");
exit (-1);
default:
break;
}
/* This can block for up to 5 seconds. If your machine is really overloaded,
* it might time out before the pipeline prerolled and we generate an error. A
* better way is to run a mainloop and catch errors there. */
ret = gst_element_get_state (data->pipeline_snapshot, NULL, NULL, 5 * GST_SECOND);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_print ("failed to play the file\n");
exit (-1);
}
/* get the duration */
gst_element_query_duration (data->pipeline_snapshot, GST_FORMAT_TIME, &duration);
if (duration != -1)
/* we have a duration, seek to 5% */
position = duration * 5 / 100;
else
/* no duration, seek to 1 second, this could EOS */
position = 1 * GST_SECOND;
/* seek to a position in the file. Most files have a black first frame so
* by seeking to somewhere else we have a bigger chance of getting something
* more interesting. An optimisation would be to detect black images and then
* seek a little more */
gst_element_seek_simple (data->pipeline_snapshot, GST_FORMAT_TIME,
GST_SEEK_FLAG_KEY_UNIT | GST_SEEK_FLAG_FLUSH, position);
/* get the preroll buffer from appsink, this blocks until appsink really
* prerolls */
g_signal_emit_by_name (data->snapshot_sink, "pull-preroll", &sample, NULL);
gst_object_unref (data->snapshot_sink);
/* if we have a buffer now, convert it to a pixbuf. It's possible that we
* don't have a buffer because we went EOS right away or had an error. */
if (sample) {
GstBuffer *buffer;
GdkPixbuf *pixbuf;
GstCaps *caps;
GstStructure *s;
/* get the snapshot buffer format now. We set the caps on the appsink so
* that it can only be an rgb buffer. The only thing we have not specified
* on the caps is the height, which is dependent on the pixel-aspect-ratio
* of the source material */
caps = gst_sample_get_caps (sample);
if (!caps) {
g_print ("could not get snapshot format\n");
exit (-1);
}
s = gst_caps_get_structure (caps, 0);
/* we need to get the final caps on the buffer to get the size */
res = gst_structure_get_int (s, "width", &width);
res |= gst_structure_get_int (s, "height", &height);
if (!res) {
g_print ("could not get snapshot dimension\n");
exit (-1);
}
/* create pixmap from buffer and save, gstreamer video buffers have a stride
* that is rounded up to the nearest multiple of 4 */
buffer = gst_sample_get_buffer (sample);
gst_buffer_map (buffer, &map, GST_MAP_READ);
pixbuf = gdk_pixbuf_new_from_data (map.data,GDK_COLORSPACE_RGB, FALSE, 8, width, height, GST_ROUND_UP_4 (width * 3), NULL, NULL);
/* save the pixbuf */
gdk_pixbuf_save (pixbuf, "snapshot.png", "png", &error, NULL);
gst_buffer_unmap (buffer, &map);
} else {
g_print ("could not make snapshot");
}
/* cleanup and exit */
gst_element_set_state (data->pipeline_snapshot, GST_STATE_NULL);
gst_object_unref (data->pipeline_snapshot);
exit (0);
}
However, when compiling I get the following linker error:
C:/Users/user1/AndroidStudioProjects/Project/app/jni/tutorial-3.c:344:
undefined reference to `gdk_pixbuf_new_from_data'
C:/Users/user1/AndroidStudioProjects/Project/app/jni/tutorial-3.c:347:
undefined reference to `gdk_pixbuf_save' clang++: error: linker
command failed with exit code 1 (use -v to see invocation)
I have #include <gdk-pixbuf-2.0/gdk-pixbuf/gdk-pixbuf.h> at the top of my file; does anyone know what I could potentially be doing wrong?
You have a linking problem, so you need to look at your build script; it seems you forgot to link the library. For example, if you use CMake, it will look like this:
target_link_libraries(tutorial-3 gdk_pixbuf-2.0)
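If you are not using CMake (the paths in the error suggest an Android NDK build), the idea is the same: gdk-pixbuf has to appear on the link line, not just as an include. A hedged example for a plain gcc build via pkg-config (assuming a desktop-style environment; for ndk-build the library would instead be added to the module's linker inputs in Android.mk, assuming a gdk-pixbuf build for Android is available):
gcc tutorial-3.c -o tutorial-3 $(pkg-config --cflags --libs gdk-pixbuf-2.0 gstreamer-1.0)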

gst-rtsp-server gst_rtsp_client_close

I am using gst-rtsp-server and I want to be able to close an ongoing stream from the server. I am trying to use the gst_rtsp_client_close() function. However, when I call it, the application sometimes becomes unresponsive. In the log I can see: GLib-CRITICAL **: g_main_context_unref: assertion 'g_atomic_int_get (&context->ref_count) > 0' failed.
I have modified the test-uri example to trigger this issue. Whenever a client connects, a timer will be started. When the timer runs out, gst_rtsp_client_close() will be called for that client.
/* GStreamer
* Copyright (C) 2008 Wim Taymans <wim.taymans at gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#include <gst/gst.h>
#include <gst/rtsp-server/rtsp-server.h>
#include <gst/rtsp-server/rtsp-media-factory-uri.h>
#define DEFAULT_RTSP_PORT "8554"
static char *port = (char *) DEFAULT_RTSP_PORT;
static GOptionEntry entries[] = {
{"port", 'p', 0, G_OPTION_ARG_STRING, &port,
"Port to listen on (default: " DEFAULT_RTSP_PORT ")", "PORT"},
{NULL}
};
static void
client_closed(GstRTSPClient *client,
gpointer user_data)
{
g_print("client closed %p\n", client);
}
static gboolean
disconnect (GstRTSPClient * client)
{
g_print("disconnect client %p\n", client);
gst_rtsp_client_close(client);
return FALSE;
}
static void
client_connected(GstRTSPServer *server,
GstRTSPClient *client,
gpointer user_data)
{
g_print("client connected %p\n", client);
g_signal_connect(client, "closed", (GCallback)client_closed,
NULL);
g_timeout_add_seconds (8, (GSourceFunc) disconnect, client);
}
int
main (int argc, gchar * argv[])
{
GMainLoop *loop;
GstRTSPServer *server;
GstRTSPMountPoints *mounts;
GstRTSPMediaFactoryURI *factory;
GOptionContext *optctx;
GError *error = NULL;
gchar *uri;
guint major;
guint minor;
guint micro;
guint nano;
gst_version(&major, &minor, &micro, &nano);
g_print("%s started. GST version %u.%u.%u.%u\n", argv[0], major, minor, micro, nano);
optctx = g_option_context_new ("<uri> - Test RTSP Server, URI");
g_option_context_add_main_entries (optctx, entries, NULL);
g_option_context_add_group (optctx, gst_init_get_option_group ());
if (!g_option_context_parse (optctx, &argc, &argv, &error)) {
g_printerr ("Error parsing options: %s\n", error->message);
g_option_context_free (optctx);
g_clear_error (&error);
return -1;
}
g_option_context_free (optctx);
if (argc < 2) {
g_printerr ("Please pass an URI or file as argument!\n");
return -1;
}
loop = g_main_loop_new (NULL, FALSE);
/* create a server instance */
server = gst_rtsp_server_new ();
g_object_set (server, "service", port, NULL);
g_signal_connect(server, "client-connected", (GCallback)client_connected,
NULL);
/* get the mount points for this server, every server has a default object
* that can be used to map uri mount points to media factories */
mounts = gst_rtsp_server_get_mount_points (server);
/* make a URI media factory for a test stream. */
factory = gst_rtsp_media_factory_uri_new ();
/* check if URI is valid, otherwise convert filename to URI if it's a file */
if (gst_uri_is_valid (argv[1])) {
uri = g_strdup (argv[1]);
} else if (g_file_test (argv[1], G_FILE_TEST_EXISTS)) {
uri = gst_filename_to_uri (argv[1], NULL);
} else {
g_printerr ("Unrecognised command line argument '%s'.\n"
"Please pass an URI or file as argument!\n", argv[1]);
return -1;
}
gst_rtsp_media_factory_uri_set_uri (factory, uri);
g_free (uri);
/* attach the test factory to the /test url */
gst_rtsp_mount_points_add_factory (mounts, "/test",
GST_RTSP_MEDIA_FACTORY (factory));
/* don't need the ref to the mapper anymore */
g_object_unref (mounts);
/* attach the server to the default maincontext */
if (gst_rtsp_server_attach (server, NULL) == 0)
goto failed;
/* start serving */
g_print ("stream ready at rtsp://127.0.0.1:%s/test\n", port);
g_main_loop_run (loop);
return 0;
/* ERRORS */
failed:
{
g_print ("failed to attach the server\n");
return -1;
}
}
I start the application with: ./close-test-uri.out ~/Videos/big_buck_bunny.mp4
and then I connect to the RTSP stream using a client (VLC): vlc -vvv "rtsp://127.0.0.1:8554/test"
The video will start playing and after 8 seconds it will, as expected, be disconnected. However, as mentioned before, I get the critical error and the application becomes unresponsive (sometimes it crashes).
Am I using gst_rtsp_client_close() the wrong way?
Thank you in advance!
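For what it's worth, one likely issue is that gst_rtsp_client_close() is called here from a g_timeout source on the default main context, while the client's connection is serviced by its own watch, so tearing the client down from outside can race with the client's own sources. A hedged sketch of an alternative that lets the server remove (and thereby close) clients itself via the client filter API (remove_client_cb and disconnect_all are my own names):
/* Hypothetical sketch: ask the server to drop every connected client.
 * Returning GST_RTSP_FILTER_REMOVE unmanages the client from the server,
 * which closes its connection. */
static GstRTSPFilterResult
remove_client_cb (GstRTSPServer * server, GstRTSPClient * client,
    gpointer user_data)
{
  g_print ("removing client %p\n", client);
  return GST_RTSP_FILTER_REMOVE;
}

static gboolean
disconnect_all (GstRTSPServer * server)
{
  gst_rtsp_server_client_filter (server, remove_client_cb, NULL);
  return FALSE;  /* one-shot timeout */
}

/* instead of the per-client timeout in client_connected():
 *   g_timeout_add_seconds (8, (GSourceFunc) disconnect_all, server);
 */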

How to use ALSA snd-aloop with GStreamer?

The general goal is that I want to play an audio track on my RPi with aplay ("aplay example.mp3") and have the output audio looped back into a GStreamer program. This program then does a spectrum analysis.
I already got the spectrum analysis working on a static file with this code as the source:
data.source = gst_element_factory_make ("uridecodebin", "source");
g_object_set (data.source, "uri", "file:///home/pi/example.mp3", NULL);
Of course I want to use the overall output of my RPi as the source for the program, but I don't know how. I know I need to loop the audio back from the output to the input, and I found that snd-aloop looks promising. The problem is that I still don't know how to use it. I tried:
data.source = gst_element_factory_make ("alsasrc", "source");
g_object_set(data.source, "device", XXX ,NULL);
where XXX =
"alsa_output.platform-snd_aloop.0.analog-stereo.monitor"
"hw:1"
"hw:0"
Error -> Trying to dispose element sink, but it is in READY instead of the NULL state. You need to explicitly set Elements to the NULL state before dropping the final reference [...]
Bonus question: Is it possible to pipe audio into a GStreamer program? Something like: "aplay example.mp3 > gstreamerCprogram".
Here is the code:
#include <gst/gst.h>
#include <string.h> /* for strcmp() in the bus handler */
#define AUDIOFREQ 32000
/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
GstElement *pipeline;
GstElement *source;
GstElement *convert;
GstElement *sink;
} CustomData;
/* Handler for the pad-added signal */
static void pad_added_handler (GstElement *src, GstPad *pad, CustomData *data);
static gboolean message_handler (GstBus *bus, GstMessage *message, gpointer data){
if(message->type == GST_MESSAGE_EOS){
g_printerr("EOS\n");
}
if(message->type == GST_MESSAGE_ELEMENT){
const GstStructure *s = gst_message_get_structure (message);
const gchar *name = gst_structure_get_name(s);
if(strcmp(name, "spectrum") == 0){
const GValue *magnitudes;
gdouble freq;
magnitudes = gst_structure_get_value (s,"magnitude");
int i = 0;
for(i = 0; i < 20; ++i){
freq = (gdouble)((32000/2) * i + 32000 / 4 / 20);
if(freq > 10000){
g_printerr("%f\n",freq);
}else{
g_printerr("|");
}
}
}
}
return TRUE;
}
int main(int argc, char *argv[]) {
CustomData data;
GstCaps *caps;
GstElement *spectrum;
GstBus *bus;
GstMessage *msg;
GstStateChangeReturn ret;
gboolean terminate = FALSE;
/* Initialize GStreamer */
gst_init (&argc, &argv);
//____________________________HERE IS THE PROBLEM________________________
//data.source = gst_element_factory_make ("uridecodebin", "source");
//g_object_set (data.source, "uri", "file:///home/pi/example.mp3", NULL);
data.source = gst_element_factory_make ("alsasrc", "source");
g_object_set(data.source, "device", "alsa_output.platform-snd_aloop.0.analog-stereo.monitor",NULL);
//____________________________HERE ENDS THE PROBLEM________________________
data.convert = gst_element_factory_make ("audioconvert", "convert");
data.sink = gst_element_factory_make ("autoaudiosink", "sink");
spectrum = gst_element_factory_make ("spectrum", "spectrum");
caps = gst_caps_new_simple ("audio/x-raw", "rate",G_TYPE_INT, AUDIOFREQ, NULL);
//SET SOME VARIABLES ON SPECTRUM
g_object_set (G_OBJECT (spectrum), "bands", 20, "post-messages", TRUE, "message-phase", TRUE, NULL);
/* Create the empty pipeline */
data.pipeline = gst_pipeline_new ("test-pipeline");
if (!data.pipeline || !data.source || !data.convert || !data.sink || !caps || !spectrum) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Build the pipeline. Note that we are NOT linking the source at this
* point. We will do it later. */
gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.convert , spectrum,data.sink, NULL);
if (!gst_element_link_many (data.convert, spectrum, data.sink, NULL)) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (data.pipeline);
return -1;
}
/* Connect to the pad-added signal */
g_signal_connect (data.source, "pad-added", G_CALLBACK (pad_added_handler), &data);
/* Start playing */
ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (data.pipeline);
return -1;
}
GMainLoop *loop;
/* Listen to the bus */
bus = gst_element_get_bus (data.pipeline);
gst_bus_add_watch(bus, message_handler, NULL);
loop = g_main_loop_new (NULL,FALSE);
g_main_loop_run(loop);
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (data.pipeline, GST_STATE_NULL);
gst_object_unref (data.pipeline);
return 0;
}
/* This function will be called by the pad-added signal */
static void pad_added_handler (GstElement *src, GstPad *new_pad, CustomData *data) {
GstPad *sink_pad = gst_element_get_static_pad (data->convert, "sink");
GstPadLinkReturn ret;
GstCaps *new_pad_caps = NULL;
GstStructure *new_pad_struct = NULL;
const gchar *new_pad_type = NULL;
g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad), GST_ELEMENT_NAME (src));
/* If our converter is already linked, we have nothing to do here */
if (gst_pad_is_linked (sink_pad)) {
g_print (" We are already linked. Ignoring.\n");
goto exit;
}
/* Check the new pad's type */
new_pad_caps = gst_pad_query_caps (new_pad, NULL);
new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
new_pad_type = gst_structure_get_name (new_pad_struct);
if (!g_str_has_prefix (new_pad_type, "audio/x-raw")) {
g_print (" It has type '%s' which is not raw audio. Ignoring.\n", new_pad_type);
goto exit;
}
/* Attempt the link */
ret = gst_pad_link (new_pad, sink_pad);
if (GST_PAD_LINK_FAILED (ret)) {
g_print (" Type is '%s' but link failed.\n", new_pad_type);
} else {
g_print (" Link succeeded (type '%s').\n", new_pad_type);
}
exit:
/* Unreference the new pad's caps, if we got them */
if (new_pad_caps != NULL)
gst_caps_unref (new_pad_caps);
/* Unreference the sink pad */
gst_object_unref (sink_pad);
}
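A note on the approach above, hedged since I can't test on your setup: with the snd-aloop module loaded (modprobe snd-aloop), whatever is played to one side of the loopback card (e.g. aplay -D hw:Loopback,0 example.wav) can be captured from the other side (hw:Loopback,1); the exact card name/index depends on your system. Also, alsasrc has an always (static) source pad, so the pad-added callback used above for uridecodebin will never fire; the source has to be linked statically. A minimal sketch:
/* Hypothetical sketch: capture from the loopback card and link statically.
 * "hw:Loopback,1" assumes snd-aloop registered as a card named "Loopback". */
data.source = gst_element_factory_make ("alsasrc", "source");
g_object_set (data.source, "device", "hw:Loopback,1", NULL);

/* alsasrc has a static src pad, so no pad-added handler is needed: */
if (!gst_element_link_many (data.source, data.convert, spectrum, data.sink, NULL)) {
  g_printerr ("Elements could not be linked.\n");
  return -1;
}
On the bonus question: aplay plays to an ALSA device rather than writing audio to stdout, so a plain shell pipe will not carry the audio; the snd-aloop route is the more natural fit.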

How to fork a child process without stalling the parent process in C

My GTK+2/C application uses the OpenCV library to grab frames continuously from the web camera feed, and there's a UI button which should calculate some information by grabbing frames upon clicking.
This button-click calculation takes 1-2 seconds, depending on the system resources (on a Raspberry Pi, it takes 3-5 seconds).
Previously all functionality (viewing + calculations) was inside the same main process. However, when doing the calculation, the whole app (video feed) used to freeze until the completion of the calculation.
I have implemented fork() to create child processes and kept the interprocess communication via pipe() to update the UI with the calculated data.
However, now the lag got a lot bigger, like 5-6 seconds.
Below is the sample code.
Is there any particular way to avoid this kind of lag in the main process?
#include <stdio.h>
#include <string.h>
#include <gtk/gtk.h>
#include <diamond_calcs.h>
#include <stdlib.h>
#include <unistd.h>
#include <linux/videodev.h>
#include <fcntl.h>
#include <time.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <opencv/cv.h>
#include <opencv/cxcore.h>
#include "highgui.h"
#include <X11/Xlib.h>
/* declaring the opencv capture variable */
CvCapture* capture, *fm_capture;
GdkPixbuf* pix;
IplImage* frame, *frame_sized;
gboolean expose_event_callback(GtkWidget *widget, GdkEventExpose *event, gpointer data);
gboolean timeout(gpointer data);
gboolean autoon_clicked_cb (GtkWidget *widget, GdkEventExpose *event, gpointer data[]);
// This function grabs a frame and records some values. It is also the function called by the child process.
gboolean frame_grabber(void)
{
/* setting up the edge detection */
IplImage *frame_bgr;
if(frame){
//frame_bgr = cvQueryFrame( capture );
frame_bgr = frame_sized;
}
else{
frame_bgr = cvLoadImage("./data/media/novideo.png", 1);
}
/* Converting Color SPACE to greyscale */
int y_pos = -1, x_pos = -1; /* initialised: compared before first assignment below */
int counter=0;
typedef enum { false, true } bool;
bool col_break = false;
for(int y=0;y<frame_bgr->height;y++)
{
for(int x=0;x<frame_bgr->width;x++)
{
CvScalar color = cvGet2D(frame_bgr, y, x);
if(color.val[0]<color_filter_thres){
if (y_pos == y){
counter++;
}
x_pos = x;
y_pos = y;
if (counter > 2){
//printf("top: %i, %i \n", x_pos, y_pos);
col_break = true;
break;
}
}
}
if (col_break){
break;
}
}
ruler_top_cal_preset = y_pos;
/* bottom ruler - scanning backwards*/
int y_pos_bot = -1, x_pos_bot = -1;
int counter_bot=0;
bool col_break2 = false;
for(int y_b=frame_bgr->height-1;y_b>0;y_b--)
{
for(int x_b=frame_bgr->width-1;x_b>0;x_b--)
{
CvScalar color_bot = cvGet2D(frame_bgr, y_b, x_b);
if(color_bot.val[0]<color_filter_thres){
if (y_pos_bot == y_b){
counter_bot++;
}
x_pos_bot = x_b;
y_pos_bot = y_b;
if (counter_bot > 2){
//printf("bottom: %i, %i \n", x_pos_bot, y_pos_bot);
col_break2 = true;
break;
}
}
}
if (col_break2){
break;
}
}
ruler_bottom_cal_preset = y_pos_bot;
dfactor_preset = ruler_bottom_cal_preset - ruler_top_cal_preset;
return TRUE;
}
/*
* main
*
* Program begins here
*/
//####### A lot of processing goes on inside main; ignore it, just focus on the program flow
int main( int argc, char **argv )
{
/* variables structs to load the gtk_ based widgets and windows */
GtkBuilder *builder;
GtkWidget *window, *event_box, *drawing_area;
GError *error = NULL;
GError *error_settings = NULL;
GtkButton *button;
GtkLabel *label;
/* Init GTK+ */
gtk_init( &argc, &argv );
/* Create new GtkBuilder object */
builder = gtk_builder_new();
/* Load UI from file. If error occurs, report it and quit application. */
if( ! gtk_builder_add_from_file( builder, "file_glade.glade", &error ) )
{
g_warning( "%s", error->message );
g_free( error );
return( 1 );
}
/* Get main window pointer from UI */
window = GTK_WIDGET( gtk_builder_get_object( builder, "window1" ) );
/* creating the drawing area */
drawing_area = gtk_drawing_area_new();
gtk_container_add (GTK_CONTAINER(event_box), drawing_area);
gtk_widget_show (drawing_area);
/* connects the draw (expose-event) with the handler */
g_signal_connect (drawing_area, "expose-event",
G_CALLBACK (expose_event_callback), NULL);
gtk_label_set_text( GTK_LABEL( labels[0] ), "Min :" );
//####### Here goes the calculation clicking
gtk_signal_connect (GTK_OBJECT (gtk_builder_get_object( builder, "autoon_button" )), "clicked",
GTK_SIGNAL_FUNC (autoon_clicked_cb), labels);
/* Show window. All other widgets are automatically shown by GtkBuilder */
gtk_widget_show( window );
/* load last saved values */
file_load( spinners );
//#######
int fd;
if((fd = open("/dev/video0", O_RDONLY)) == -1){
printf("NO CAP\n");
capture = NULL;
}
else{
capture = cvCreateCameraCapture(0);
cvSetCaptureProperty(capture, CV_CAP_PROP_BRIGHTNESS, brght);
cvSetCaptureProperty(capture, CV_CAP_PROP_CONTRAST,contra);
cvSetCaptureProperty(capture, CV_CAP_PROP_SATURATION, satu);
cvSetCaptureProperty(capture, CV_CAP_PROP_FRAME_WIDTH, 800);
cvSetCaptureProperty(capture, CV_CAP_PROP_FRAME_HEIGHT, 600);
// ## remove the forking from here and add that to the expose function
pid_t pID1 = vfork();
if (pID1 == 0) // child process
{
g_timeout_add (15, timeout, drawing_area);
//_exit(0);
}
else if ( pID1 < 0 ){
printf("Failed to fork video overlay child process \n");
exit(1);
}
}
/* Destroy builder, since we don't need it anymore */
g_object_unref( G_OBJECT( builder ) );
//g_object_unref( G_OBJECT( settings_builder ) );
//g_object_unref( G_OBJECT( about_builder ) );
/* Start main loop */
gtk_main();
return( 0 );
}
/* This function runs every x seconds and makes the video feed */
/*
* Function: expose_event_callback
* --------------------
* When the widget is exposed or forced to expose, this function handler will run to generate the video feed
*
* returns: none
*/
gboolean
expose_event_callback(GtkWidget *widget, GdkEventExpose *event, gpointer data)
{
/* Capture a single frame from the video stream */
if (capture){
frame = cvQueryFrame( capture );
frame_sized = frame;
pix = gdk_pixbuf_new_from_data((guchar*) frame->imageData,
GDK_COLORSPACE_RGB, FALSE, frame->depth, frame->width,
frame->height, (frame->widthStep), NULL, NULL);
}
else{
printf("You've done it\n");
pix = gdk_pixbuf_new_from_file("./data/media/novideo.png", NULL);
}
/* putting the generated pix buff to the correct layout (GTK widget) */
gdk_draw_pixbuf(widget->window,
widget->style->fg_gc[GTK_WIDGET_STATE (widget)], pix, 0, 0, 0, 0,
-1, -1, GDK_RGB_DITHER_NONE, 0, 0); /* Other possible values are GDK_RGB_DITHER_MAX, GDK_RGB_DITHER_NORMAL */
return TRUE;
}
/*
* Function: timeout
* --------------------
* This function runs at the given time interval. It calls the required functions to generate video and drawing
*
* returns: none
*/
gboolean timeout(gpointer data)
{
/* DEBUG print */
//printf("Time out Hello: %d\n", rand());
/* Redraws the Widget. Widget will call for the callback function again */
gtk_widget_queue_draw (data);
/* Starting the drawing function */
//ofdra(data,NULL);
return TRUE;
}
gboolean autoon_clicked_cb (GtkWidget *widget, GdkEventExpose *event, gpointer data[])
{
char display1_entry_text[100];
int x = 1;
int pfds[2];
pipe(pfds);
pid_t pID = fork();
if (pID == 0) // child process
{
double ytops[10];
double ybots[10];
double min_diam;
double max_diam;
double avg_diam;
clock_t t1, t2,end_t; // clocking for debugging purposes
t2 = clock();
for(int i=0;i<10;i++) /* ytops/ybots above hold 10 samples */
{
//measure_clicked_callback(widget, NULL, data);
frame_grabber();
ytops[i] = ruler_top_cal_preset;
ybots[i] = ruler_bottom_cal_preset;
t1 = clock();
float x = (float)(t1-t2);
float y = x/CLOCKS_PER_SEC;
float z = y*1000;
t2 = t1;
}
/* sorting arrays for ascending order */
if(ybots && ytops){
array_sorter(ybots);
array_sorter(ytops);
min_diam = (ybots[0]-ytops[9])/scale_factor;
max_diam = (ybots[9]-ytops[0])/scale_factor;
avg_diam = (min_diam+max_diam)/2;
}
sprintf(display1_entry_text,"Min : %.3g",min_diam);
write(pfds[1], display1_entry_text, 100);
_exit(0);
}
else if(pID <0){
printf("Failed to fork \n");
exit(1);
}
else{
//parent process
char buf[100];
read(pfds[0], buf, 100);
//updates the gtk interface with read value
gtk_label_set_text(GTK_LABEL(data[0]), buf);
wait(NULL);
}
return TRUE;
}
**Note: I have pasted this from the actual whole code, so it will not run as-is. It is only here to give an idea of what I'm trying to achieve.
It looks like you have blocking read and wait calls in your parent process. In other words, despite the fact that you spawn a child process to perform the calculations, you still block your parent process till the child process completes. No parallelism is achieved.
I would suggest having a worker thread in the process that would do the calculations and notify the main thread when the results are available.
In the parent process/thread you should make the read end of the pipe non-blocking and register it with the event loop. Once the read end becomes ready for read, read all data in non-blocking mode.
Set a signal handler for SIGCHLD and call wait in it to release the child process resources.
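To make the second suggestion concrete, here is a hedged sketch (pipe_ready is my own name) of watching the pipe's read end from the GTK main loop with a GIOChannel, so the click handler returns immediately instead of blocking on read()/wait():
#include <fcntl.h>
#include <signal.h>

/* called by the GTK main loop when the child has written its result */
static gboolean
pipe_ready (GIOChannel *source, GIOCondition cond, gpointer data)
{
  gchar buf[100];
  gsize len = 0;

  if (g_io_channel_read_chars (source, buf, sizeof (buf) - 1, &len, NULL)
      == G_IO_STATUS_NORMAL) {
    buf[len] = '\0';
    gtk_label_set_text (GTK_LABEL (data), buf);  /* update the UI */
  }
  return FALSE;  /* one-shot: remove the watch after the result arrives */
}

/* in main(): reap children automatically so no blocking wait() is needed */
signal (SIGCHLD, SIG_IGN);

/* in the parent branch of the click handler, instead of read()/wait(): */
fcntl (pfds[0], F_SETFL, O_NONBLOCK);
GIOChannel *ch = g_io_channel_unix_new (pfds[0]);
g_io_add_watch (ch, G_IO_IN, pipe_ready, data[0]);
g_io_channel_unref (ch);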
