How to get H264 frames via GStreamer - C

I'm familiar with FFmpeg, but not with GStreamer. I know how to get an H264 frame through FFmpeg, for example via an AVPacket, but I don't know how to get an H264 frame with GStreamer. I don't intend to save the H264 data directly to a local file, because I need to do other processing on it. Can anyone give me some sample code? I'd be very grateful. Here's what I learned from other people's code.
#include <stdio.h>
#include <string.h>
#include <fstream>
#include <unistd.h>
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>

typedef struct {
    GstPipeline *pipeline;
    GstAppSrc *src;
    GstElement *filter1;
    GstElement *encoder;
    GstElement *filter2;
    GstElement *parser;
    GstElement *qtmux;
    GstElement *sink;
    GstClockTime timestamp;
    guint sourceid;
} gst_app_t;

static gst_app_t gst_app;

int main()
{
    gst_app_t *app = &gst_app;
    GstStateChangeReturn state_ret;
    gst_init(NULL, NULL); //Initialize Gstreamer
    app->timestamp = 0;   //Set timestamp to 0

    //Create pipeline, and pipeline elements
    app->pipeline = (GstPipeline*)gst_pipeline_new("mypipeline");
    app->src = (GstAppSrc*)gst_element_factory_make("appsrc", "mysrc");
    app->filter1 = gst_element_factory_make("capsfilter", "myfilter1");
    app->encoder = gst_element_factory_make("omxh264enc", "myomx");
    app->filter2 = gst_element_factory_make("capsfilter", "myfilter2");
    app->parser = gst_element_factory_make("h264parse", "myparser");
    app->qtmux = gst_element_factory_make("qtmux", "mymux");
    app->sink = gst_element_factory_make("filesink", NULL);

    if (!app->pipeline ||
        !app->src || !app->filter1 ||
        !app->encoder || !app->filter2 ||
        !app->parser || !app->qtmux ||
        !app->sink) {
        printf("Error creating pipeline elements!\n");
        exit(2);
    }

    //Attach elements to pipeline
    gst_bin_add_many(
        GST_BIN(app->pipeline),
        (GstElement*)app->src,
        app->filter1,
        app->encoder,
        app->filter2,
        app->parser,
        app->qtmux,
        app->sink,
        NULL);

    //Set pipeline element attributes
    g_object_set(app->src, "format", GST_FORMAT_TIME, NULL);
    GstCaps *filtercaps1 = gst_caps_new_simple("video/x-raw",
        "format", G_TYPE_STRING, "I420",
        "width", G_TYPE_INT, 1280,
        "height", G_TYPE_INT, 720,
        "framerate", GST_TYPE_FRACTION, 1, 1,
        NULL);
    g_object_set(G_OBJECT(app->filter1), "caps", filtercaps1, NULL);
    GstCaps *filtercaps2 = gst_caps_new_simple("video/x-h264",
        "stream-format", G_TYPE_STRING, "byte-stream",
        NULL);
    g_object_set(G_OBJECT(app->filter2), "caps", filtercaps2, NULL);
    g_object_set(G_OBJECT(app->sink), "location", "output.h264", NULL);

    //Link elements together
    g_assert(gst_element_link_many(
        (GstElement*)app->src,
        app->filter1,
        app->encoder,
        app->filter2,
        app->parser,
        app->qtmux,
        app->sink,
        NULL));

    //Play the pipeline
    state_ret = gst_element_set_state((GstElement*)app->pipeline, GST_STATE_PLAYING);
    g_assert(state_ret == GST_STATE_CHANGE_ASYNC);

    //Get a pointer to the test input
    FILE *testfile = fopen("test.yuv", "rb");
    g_assert(testfile != NULL);

    //Push the data from buffer to gstpipeline 100 times
    for (int i = 0; i < 100; i++) {
        char *filebuffer = (char*)malloc(1382400); //Allocate memory for framebuffer
        if (filebuffer == NULL) { printf("Memory error\n"); exit(2); } //Errorcheck
        size_t bytesread = fread(filebuffer, 1, 1382400, testfile); //Read to filebuffer
        //printf("File Read: %zu bytes\n", bytesread);

        GstBuffer *pushbuffer; //Actual databuffer
        GstFlowReturn ret;     //Return value
        pushbuffer = gst_buffer_new_wrapped(filebuffer, 1382400); //Wrap the data

        //Set frame timestamp
        GST_BUFFER_PTS(pushbuffer) = app->timestamp;
        GST_BUFFER_DTS(pushbuffer) = app->timestamp;
        GST_BUFFER_DURATION(pushbuffer) = gst_util_uint64_scale_int(1, GST_SECOND, 1);
        app->timestamp += GST_BUFFER_DURATION(pushbuffer);
        //printf("Frame is at %lu\n", app->timestamp);

        ret = gst_app_src_push_buffer(app->src, pushbuffer); //Push data into pipeline
        g_assert(ret == GST_FLOW_OK);
    }
    usleep(100000);

    //Declare end of stream
    gst_app_src_end_of_stream(GST_APP_SRC(app->src));
    printf("End Program.\n");
    return 0;
}
Here is a link to the source of the code
link

Your example serves the purpose of feeding data from the application into GStreamer, encoding it as H264 and writing the result to a file.
What you need (I am guessing here) is the opposite: to read data from a file - let's say movie.mp4 - and get the H264 data into your application (?)
I believe you have two options:
1, Use appsink instead of filesink and feed the data from a file using filesrc. If you also need other processing besides grabbing the H264 frames (like playing the video or sending it via network), you will have to use tee to split the pipeline into two output branches, as in the example gst-launch below. One branch of the output pipeline would go to, for example, a windowed output - autovideosink - and the other would go to your application.
To demonstrate this split and still show you what is really happening, I will use the debugging element identity, which is able to dump the data that passes through it.
This way you will also learn to use this handy tool for experiments and for verifying that you know what you are doing. This is not the solution you need.
gst-launch-1.0 -q filesrc location= movie.mp4 ! qtdemux name=qt ! video/x-h264 ! h264parse ! tee name=t t. ! queue ! avdec_h264 ! videoconvert ! autovideosink t. ! queue ! identity dump=1 ! fakesink sync=true
This pipeline plays the video in a window (autovideosink), while the other branch of the tee goes to the debugging element called identity, which dumps each frame in hexdump manner (with addresses, character representation and everything).
So what you see on the stdout of gst-launch are the actual H264 frames (but you do not see boundaries or anything - it is just a raw dump).
To understand the gst-launch syntax (mainly the aliases with name=), check this part of the documentation.
In real code you would not use identity and fakesink; instead you would link an appsink there and connect the appsink signals to callbacks in your C source code.
There are nice examples for this; I will not attempt to give you a complete solution. This example demonstrates how to get samples out of appsink.
The important bits are:
/* The appsink has received a buffer */
static GstFlowReturn new_sample (GstElement *sink, CustomData *data) {
  GstSample *sample;
  /* Retrieve the buffer */
  g_signal_emit_by_name (sink, "pull-sample", &sample);
  if (sample) {
    /* The only thing we do in this example is print a * to indicate a received buffer */
    g_print ("*");
    gst_sample_unref (sample);
    return GST_FLOW_OK;
  }
  return GST_FLOW_ERROR;
}
// somewhere in main()
// construction and linkage of elements
g_signal_connect (data.app_sink, "new-sample", G_CALLBACK (new_sample), &data);
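To connect this back to the original question, here is a minimal, untested sketch of how the appsink branch could look in C for grabbing the H264 frames. The pipeline string, the element names and the on_new_sample callback are my own choices (assumptions), not taken from the linked example:
#include <gst/gst.h>

/* Called for every H264 frame that reaches the appsink */
static GstFlowReturn on_new_sample (GstElement *sink, gpointer user_data) {
    GstSample *sample = NULL;
    g_signal_emit_by_name (sink, "pull-sample", &sample);
    if (!sample)
        return GST_FLOW_ERROR;

    GstBuffer *buffer = gst_sample_get_buffer (sample);
    GstMapInfo map;
    if (gst_buffer_map (buffer, &map, GST_MAP_READ)) {
        /* map.data / map.size is one encoded H264 frame - do your processing here */
        g_print ("got H264 frame, %" G_GSIZE_FORMAT " bytes\n", map.size);
        gst_buffer_unmap (buffer, &map);
    }
    gst_sample_unref (sample);
    return GST_FLOW_OK;
}

int main (int argc, char *argv[]) {
    gst_init (&argc, &argv);

    /* filesrc ! qtdemux ! h264parse ! appsink, built with gst_parse_launch for brevity */
    GstElement *pipeline = gst_parse_launch (
        "filesrc location=movie.mp4 ! qtdemux ! h264parse ! "
        "appsink name=mysink emit-signals=true sync=false", NULL);
    GstElement *appsink = gst_bin_get_by_name (GST_BIN (pipeline), "mysink");
    g_signal_connect (appsink, "new-sample", G_CALLBACK (on_new_sample), NULL);

    gst_element_set_state (pipeline, GST_STATE_PLAYING);

    /* block until EOS or an error; a GMainLoop with a bus watch works too */
    GstBus *bus = gst_element_get_bus (pipeline);
    GstMessage *msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
                                                  GST_MESSAGE_EOS | GST_MESSAGE_ERROR);
    if (msg)
        gst_message_unref (msg);
    gst_object_unref (bus);

    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (appsink);
    gst_object_unref (pipeline);
    return 0;
}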
2, The second solution is to use a pad probe registered for buffers only. A pad probe is a way to register a callback on any pad of any element in the pipeline and to tell GStreamer what information you are interested in on that probe. You can ask it to call the callback upon every event, or every downstream event, or on every buffer going through that probe. In the callback which the pad probe calls you will extract the buffer and the actual data in that buffer.
Again, there are many examples of how to use pad probes.
One very nice example containing the logic of almost exactly what you need can be found here.
The important bits:
static GstPadProbeReturn
cb_have_data (GstPad          *pad,
              GstPadProbeInfo *info,
              gpointer         user_data)
{
  // ... the code for writing the buffer data somewhere ..
}

// ... later in main()
pad = gst_element_get_static_pad (src, "src");
gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_BUFFER,
    (GstPadProbeCallback) cb_have_data, NULL, NULL);
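For completeness, a minimal sketch of what the elided callback body could do - simply mapping the buffer for reading; anything beyond the GstPadProbeInfo accessors is up to your own processing:
static GstPadProbeReturn
cb_have_data (GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
{
  GstBuffer *buffer = GST_PAD_PROBE_INFO_BUFFER (info);
  GstMapInfo map;

  if (buffer != NULL && gst_buffer_map (buffer, &map, GST_MAP_READ)) {
    /* map.data / map.size hold the encoded frame passing through this pad */
    g_print ("probe saw %" G_GSIZE_FORMAT " bytes\n", map.size);
    gst_buffer_unmap (buffer, &map);
  }
  return GST_PAD_PROBE_OK;   /* let the buffer continue downstream */
}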

Related

GStreamer plugin src -> opusdec sink gives "error: decoder not initialized"

I have the following two pipelines to transmit opus encoded audio from server to client:
The server:
gst-launch-1.0 -v alsasrc ! audioconvert ! audioresample ! audio/x-raw, rate=16000, channels=1, format=S16LE ! opusenc ! rtpopuspay ! udpsink host=0.0.0.0 port=4000
The client:
gst-launch-1.0 udpsrc port=4000 ! application/x-rtp,payload=96,encoding-name=OPUS ! rtpopusdepay ! opusdec ! autoaudiosink
I am trying to create a custom GstElement-based plugin to replace rtpopusdepay on the client side with a hand-crafted one (to be backward compatible with an existing server implementation that doesn't use rtpopuspay but uses a hand-crafted byte format to wrap the Opus-encoded data).
To test the concept I would like to use the pipelines above, but replace the client side with:
GST_PLUGIN_PATH=. gst-launch-1.0 udpsrc port=4000 ! simpacketdepay ! opusdec ! autoaudiosink
Where simpacketdepay is the plugin I created. The plugin is quite simple: it has fixed caps (ANY for its sink and "audio/x-opus" for its src). In its chain function I simply remove the header that rtpopuspay adds in front of the encoded Opus stream (the first 96 bits, i.e. 12 bytes) and push the data forward.
The full code:
#include "gstsimpacketdepay.h"

#include <stdio.h>
#include <string.h>

#include <gst/gst.h>
#include <gst/gstcaps.h>

GST_DEBUG_CATEGORY_STATIC (gst_simpacketdepay_debug);
#define GST_CAT_DEFAULT gst_simpacketdepay_debug

/* Enum to identify properties */
enum
{
  PROP_0
};

static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE(
  "sink",
  GST_PAD_SINK,
  GST_PAD_ALWAYS,
  GST_STATIC_CAPS("ANY")
);

static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE (
  "src",
  GST_PAD_SRC,
  GST_PAD_ALWAYS,
  GST_STATIC_CAPS("audio/x-opus, rate=16000, channels=1, channel-mapping-family=0, stream-count=1, coupled-count=0")
);

/* Define our element type. Standard GObject/GStreamer boilerplate stuff */
#define gst_simpacketdepay_parent_class parent_class
G_DEFINE_TYPE(GstSimpacketdepay, gst_simpacketdepay, GST_TYPE_ELEMENT);

static GstFlowReturn gst_simpacketdepay_chain (GstPad *pad, GstObject *parent, GstBuffer *buf);

static void gst_simpacketdepay_class_init (GstSimpacketdepayClass * klass)
{
  GObjectClass *gobject_class;
  GstElementClass *gstelement_class;
  gstelement_class = (GstElementClass *) klass;

  /* Set sink and src pad capabilities */
  gst_element_class_add_pad_template (gstelement_class, gst_static_pad_template_get(&src_factory));
  gst_element_class_add_pad_template (gstelement_class, gst_static_pad_template_get(&sink_factory));

  /* Set metadata describing the element */
  gst_element_class_set_details_simple (
    gstelement_class,
    "simpacketdepay plugin",
    "simpacketdepay plugin",
    "Sim Packet depay",
    "Test"
  );
}

static void gst_simpacketdepay_init (GstSimpacketdepay * simpacketdepay)
{
  simpacketdepay->sinkpad = gst_pad_new_from_static_template (&sink_factory, "sink");
  simpacketdepay->srcpad = gst_pad_new_from_static_template (&src_factory, "src");
  gst_pad_use_fixed_caps(simpacketdepay->sinkpad);
  gst_pad_use_fixed_caps(simpacketdepay->srcpad);
  gst_element_add_pad (GST_ELEMENT (simpacketdepay), simpacketdepay->sinkpad);
  gst_element_add_pad (GST_ELEMENT (simpacketdepay), simpacketdepay->srcpad);
  gst_pad_set_chain_function (simpacketdepay->sinkpad, gst_simpacketdepay_chain);
}

static GstFlowReturn gst_simpacketdepay_chain (GstPad *pad, GstObject *parent, GstBuffer *inBuf)
{
  GstSimpacketdepay *filter = GST_SIMPACKETDEPAY(parent);

  GstMapInfo info;
  gst_buffer_map(inBuf, &info, GST_MAP_READ);
  const size_t inSize = info.size;
  printf("Incoming size %lu\n", info.size);
  gst_buffer_unmap(inBuf, &info);

  GstBuffer* outBuf = gst_buffer_copy(inBuf);
  GstMemory* const inMemory = gst_buffer_get_memory(inBuf, 0);
  GstMemory* const outMemory = gst_memory_share(inMemory, 12, inSize - 12);
  gst_buffer_remove_all_memory(outBuf);
  gst_buffer_prepend_memory(outBuf, outMemory);

  gst_buffer_map(outBuf, &info, GST_MAP_READ);
  printf("Outgoing size: %lu\n", info.size);
  fflush(stdout);
  gst_buffer_unmap(outBuf, &info);

  gst_buffer_unref (inBuf);

  GstFlowReturn result = gst_pad_push (filter->srcpad, outBuf);
  return result;
}

static gboolean simpacketdepay_plugin_init (GstPlugin * plugin)
{
  GST_DEBUG_CATEGORY_INIT (gst_simpacketdepay_debug, "simpacketdepay", 0, "simpacketdepay");
  return gst_element_register (plugin, "simpacketdepay", GST_RANK_NONE, GST_TYPE_SIMPACKETDEPAY);
}

#ifndef VERSION
#define VERSION "1.0.0"
#endif
#ifndef PACKAGE
#define PACKAGE "FIXME_package"
#endif
#ifndef PACKAGE_NAME
#define PACKAGE_NAME "FIXME_package_name"
#endif
#ifndef GST_PACKAGE_ORIGIN
#define GST_PACKAGE_ORIGIN "http://FIXME.org/"
#endif

GST_PLUGIN_DEFINE (
  GST_VERSION_MAJOR,
  GST_VERSION_MINOR,
  simpacketdepay,
  "FIXME plugin description",
  simpacketdepay_plugin_init,
  VERSION,
  "LGPL",
  PACKAGE_NAME,
  GST_PACKAGE_ORIGIN
)
The negotiation and everything goes well until I push the first buffer to the source pad from gst_simpacketdepay_chain with GstFlowReturn result = gst_pad_push (filter->srcpad, outBuf);
Then I get the following error (the detailed debug log is pasted here):
0:00:00.510871708 42302 0x55fbd0c44000 LOG audiodecoder gstaudiodecoder.c:2034:gst_audio_decoder_chain:<opusdec0> received buffer of size 160 with ts 0:00:00.006492658, duration 99:99:99.999999999
0:00:00.510877845 42302 0x55fbd0c44000 WARN audiodecoder gstaudiodecoder.c:2084:gst_audio_decoder_chain:<opusdec0> error: decoder not initialized
0:00:00.510882963 42302 0x55fbd0c44000 DEBUG GST_MESSAGE gstelement.c:2110:gst_element_message_full_with_details:<opusdec0> start
0:00:00.510896592 42302 0x55fbd0c44000 INFO GST_ERROR_SYSTEM gstelement.c:2140:gst_element_message_full_with_details:<opusdec0> posting message: GStreamer error: negotiation problem.
0:00:00.510910301 42302 0x55fbd0c44000 LOG GST_MESSAGE gstmessage.c:303:gst_message_new_custom: source opusdec0: creating new message 0x7f519c002910 error
0:00:00.510919198 42302 0x55fbd0c44000 WARN structure gststructure.c:1861:priv_gst_structure_append_to_gstring: No value transform to serialize field 'gerror' of type 'GError'
0:00:00.510929043 42302 0x55fbd0c44000 DEBUG GST_BUS gstbus.c:315:gst_bus_post:<bus1> [msg 0x7f519c002910] posting on bus error message: 0x7f519c002910, time 99:99:99.999999999, seq-num 43, element 'opusdec0', GstMessageError, gerror=(GError)NULL, debug=(string)"gstaudiodecoder.c\(2084\):\ gst_audio_decoder_chain\ \(\):\ /GstPipeline:pipeline0/GstOpusDec:opusdec0:\012decoder\ not\ initialized";
0:00:00.510937098 42302 0x55fbd0c44000 DEBUG bin gstbin.c:3718:gst_bin_handle_message_func:<pipeline0> [msg 0x7f519c002910] handling child opusdec0 message of type error
0:00:00.510942210 42302 0x55fbd0c44000 DEBUG bin gstbin.c:3727:gst_bin_handle_message_func:<pipeline0> got ERROR message, unlocking state change
0:00:00.510947151 42302 0x55fbd0c44000 DEBUG bin gstbin.c:4065:gst_bin_handle_message_func:<pipeline0> posting message upward
0:00:00.510955219 42302 0x55fbd0c44000 WARN structure gststructure.c:1861:priv_gst_structure_append_to_gstring: No value transform to serialize field 'gerror' of type 'GError'
0:00:00.510962328 42302 0x55fbd0c44000 DEBUG GST_BUS gstbus.c:315:gst_bus_post:<bus2> [msg 0x7f519c002910] posting on bus error message: 0x7f519c002910, time 99:99:99.999999999, seq-num 43, element 'opusdec0', GstMessageError, gerror=(GError)NULL, debug=(string)"gstaudiodecoder.c\(2084\):\ gst_audio_decoder_chain\ \(\):\ /GstPipeline:pipeline0/GstOpusDec:opusdec0:\012decoder\ not\ initialized";
<opusdec0> error: decoder not initialized? Do I need to do something special to initialize the opus decoder? What step do I miss?
I was able to solve the issue. When the plugin element enters the PLAYING state, we should push a caps event (gst_event_new_caps) to the source pad - even with fixed caps... I haven't found anything in the documentation that explains this requirement.
So I added the following state change handler and the pipeline started to work:
static GstStateChangeReturn gst_simpacketdepay_change_state (GstElement *element, GstStateChange transition)
{
  const GstStateChangeReturn result = GST_ELEMENT_CLASS(parent_class)->change_state (element, transition);
  if (result == GST_STATE_CHANGE_FAILURE) {
    return result;
  }

  switch (transition) {
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING: {
      GstSimpacketdepay *filter = GST_SIMPACKETDEPAY(element);
      gst_pad_push_event(filter->srcpad,
          gst_event_new_caps(gst_pad_template_get_caps(gst_static_pad_template_get(&src_factory))));
    } break;
    default:
      break;
  }
  return result;
}
I'm sad to see how underdocumented this part of GStreamer is.

How do I communicate with the encoder in GStreamer

I've just started learning GStreamer and I'm having some trouble communicating with the x264 encoder.
I'm trying to stream my screen from one device to another using the x264 encoder. However, my network is pretty unstable, so sometimes I miss an IDR frame. That basically means no video for at least 2 s until the next IDR frame is received.
I'm trying to implement some way for my device to ask GStreamer to generate an IDR frame as soon as one is lost.
Here is a brief summary of the code so far:
#include <gst/gst.h>

int main(int argc, char* argv[])
{
    GstElement* pipeline;
    GstElement* videosource, * videoconverter, * videoencoder, * videosink;
    GstBus* bus;
    GstMessage* msg;
    GstStateChangeReturn ret;

    gst_init(&argc, &argv);

    videosource = gst_element_factory_make("dxgdiscreencapsrc", "video_source");
    videoconverter = gst_element_factory_make("autovideoconvert", "video_converter");
    videoencoder = gst_element_factory_make("x264enc", "video_encoder");
    videosink = gst_element_factory_make("udpsink", "video_sink");

    // create the pipeline
    pipeline = gst_pipeline_new("video_pipeline");
    if (!pipeline || !videosource || !videoconverter || !videoencoder || !videosink) {
        g_printerr("All elements could not be created");
        gst_object_unref(pipeline);
        return -1;
    }

    // build pipeline
    gst_bin_add_many(GST_BIN(pipeline), videosource, videoconverter, videoencoder, videosink,
        NULL);

    // link elements
    if (gst_element_link_many(videosource, videoconverter, videoencoder, videosink, NULL) != TRUE) {
        g_printerr("Elements could not be linked.\n");
        gst_object_unref(pipeline);
        return -1;
    }

    // modify properties
    g_object_set(videoencoder,
        "threads", 8,
        "quantizer", 21,          // quantizer must be set to where bitrate is around desired value
        "bitrate", 4800,          // kbit/s
        "vbv-buf-capacity", 5000, // the more it is the higher the bitrate fluctuations. set to around bitrate or bitrate/framerate for more control
        "tune", 0x4,              // zerolatency
        "speed-preset", 2,        // superfast
        "key-int-max", 120,       // ideally twice the framerate
        NULL);
    g_object_set(videosink,
        "host", "REDACTED",
        "port", 1234,
        NULL);

    // start playing
    ret = gst_element_set_state(pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) {
        g_printerr("Unable to set the pipeline to the playing state.\n");
        gst_object_unref(pipeline);
        return -1;
    }
... read bus and cleanup afterwards (same as those in gstreamer tutorials)
How do I force the next frame to be an IDR frame? Is there any way to directly access the underlying encoder parameters to set the X264_TYPE_IDR flag?
Thanks for your time!
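For what it's worth, the usual application-level way to request this in GStreamer is to send a force-key-unit event upstream to the encoder instead of reaching for X264_TYPE_IDR directly. A minimal sketch, assuming the videoencoder element from the code above and linking against gstreamer-video-1.0 (the helper name request_idr_frame is mine):
#include <gst/gst.h>
#include <gst/video/video.h>   /* gst_video_event_new_upstream_force_key_unit() */

/* Ask the encoder for an IDR frame "now"; call this when the receiver reports a lost IDR. */
static void request_idr_frame(GstElement *videoencoder)
{
    GstPad *srcpad = gst_element_get_static_pad(videoencoder, "src");
    /* GST_CLOCK_TIME_NONE = as soon as possible; TRUE = also resend SPS/PPS headers */
    GstEvent *event = gst_video_event_new_upstream_force_key_unit(GST_CLOCK_TIME_NONE, TRUE, 0);
    gst_pad_send_event(srcpad, event);
    gst_object_unref(srcpad);
}
As far as I know, x264enc's GstVideoEncoder base class handles this event and forces a keyframe on the next buffer, so there should be no need to touch the raw x264 parameters.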

gstreamer 1.14.5 multiple rtspsrc element pipeline, reconnect individual streams when disconnected via 'C' code

Hello GStreamer community & fans,
I have a working pipeline that connects to multiple H.264 IP camera streams using multiple rtspsrc elements aggregated into a single pipeline for downstream video processing.
Intermittently and randomly, streams coming in over remote and slower connections will have problems, time out, retry and go dead, leaving that stream with a black image when viewing the streams after processing. The other working streams continue to process normally. The rtspsrc elements are set up to retry the RTSP connection, and that seems to somewhat work, but for the cases where it doesn't, I'm looking for a way to disconnect the stream entirely from the rtspsrc element and restart that particular stream without disrupting the other streams.
I haven't found any obvious examples or ways to accomplish this, so I've been tinkering with the rtspsrc element code itself, using this public function to access the rtspsrc internals that handle connecting.
__attribute__ ((visibility ("default"))) GstRTSPResult my_gst_rtspsrc_conninfo_reconnect(GstRTSPSrc *, gboolean);

GstRTSPResult
my_gst_rtspsrc_conninfo_reconnect(GstRTSPSrc *src, gboolean async)
{
    int retries = 0, port = 0;
    char portrange_buff[32];
    // gboolean manual_http;

    GST_ELEMENT_WARNING(src, RESOURCE, READ, (NULL),
        (">>>>>>>>>> Streamer: A camera closed the streaming connection. Trying to reconnect"));
    gst_rtspsrc_set_state (src, GST_STATE_PAUSED);
    gst_rtspsrc_set_state (src, GST_STATE_READY);
    gst_rtspsrc_flush(src, TRUE, FALSE);
    // manual_http = src->conninfo.connection->manual_http;
    // src->conninfo.connection->manual_http = TRUE;
    gst_rtsp_connection_set_http_mode(src->conninfo.connection, TRUE);

    if (gst_rtsp_conninfo_close(src, &src->conninfo, TRUE) == GST_RTSP_OK)
    {
        memset(portrange_buff, 0, sizeof(portrange_buff));
        g_object_get(G_OBJECT(src), "port-range", portrange_buff, NULL);
        for (retries = 0; portrange_buff[retries] && isdigit(portrange_buff[retries]); retries++)
            port = (port * 10) + (portrange_buff[retries] - '0'); /* convert the digit characters to a number */

        if (port != src->client_port_range.min)
            GST_ELEMENT_WARNING(src, RESOURCE, READ, (NULL), (">>>>>>>>>> Streamer: port range start mismatch"));

        GST_WARNING_OBJECT(src, ">>>>>>>>>> Streamer: old port.min: %d, old port.max: %d, old port-range: %s\n",
            (src->client_port_range.min), (src->client_port_range.max), (portrange_buff));

        src->client_port_range.min += 6;
        src->client_port_range.max += 6;
        src->next_port_num = src->client_port_range.min;

        memset(portrange_buff, 0, sizeof(portrange_buff));
        sprintf(portrange_buff, "%d-%d", src->client_port_range.min, src->client_port_range.max);
        g_object_set(G_OBJECT(src), "port-range", portrange_buff, NULL);

        for (retries = 0; retries < 5 && gst_rtsp_conninfo_connect(src, &src->conninfo, async) != GST_RTSP_OK; retries++)
            sleep(10);
    }

    if (retries < 5)
    {
        gst_rtspsrc_set_state(src, GST_STATE_PAUSED);
        gst_rtspsrc_set_state(src, GST_STATE_PLAYING);
        return GST_RTSP_OK;
    }
    else return GST_RTSP_ERROR;
}
I realize this is probably not best practice and I'm doing this to find a better way once I understand the internals better through this learning experience.
I appreciate any feedback anyone has to this problem.
-Doug
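For reference, one gentler approach that avoids patching rtspsrc internals is to reset only the affected branch from application code. A rough sketch, under the assumption that each rtspsrc was given a unique name (for example "cam3") when the pipeline was built:
#include <gst/gst.h>

/* Tear down and restart a single rtspsrc without touching the other streams. */
static gboolean restart_rtsp_branch(GstPipeline *pipeline, const gchar *src_name)
{
    GstElement *src = gst_bin_get_by_name(GST_BIN(pipeline), src_name);
    if (!src)
        return FALSE;

    /* NULL state closes the RTSP connection and releases its sockets/ports */
    gst_element_set_state(src, GST_STATE_NULL);
    /* Re-join the still-running pipeline; rtspsrc reconnects to the camera */
    gst_element_sync_state_with_parent(src);

    gst_object_unref(src);
    return TRUE;
}
Depending on how the rtspsrc pads are linked downstream, you may also have to handle its pad-added/pad-removed signals again after the restart.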

Segfault during GTK_TOGGLE_BUTTON() macro. Pointer values changed after changing checkbox state

I am trying to create a GUI program using Glade and GTK+ 3 to set and write configuration data to a custom microchip. I am trying to implement a feature to save/load previous configurations in my GUI. After saving to a file and loading, some of my callback functions now cause a segfault; one example is the callback for a "global enable" checkbox.
I am currently trying to read data from my file and use that data to update the state of the GUI (as well as the global variables used for configuring the IC). I update my checkbox as follows:
/* Global widget handles */
GtkWidget* GEN_CB_h;
GtkWidget* Neg_Pol_CB_h;
GtkWidget* Save_File_Box_h;
GtkWidget* Save_File_Button_h;
GtkWidget* Load_File_Box_h;
//more widgets here

/* Global configuration variables */
char gen;
char neg_pol;
//more variables here

/* Write data to a configuration file. */
void on_Save_Config_Button_clicked()
{
    GtkEntry* save_file = GTK_ENTRY(Save_File_Box_h);
    const gchar* filename = gtk_entry_get_text(save_file);
    FILE* fd = fopen((const char*)filename, "w");
    if(!fd)
    {
        perror("Failed to open file\n");
        exit(EXIT_FAILURE);
    }
    fwrite(&gen, sizeof(gen), 1, fd);
    fwrite(&neg_pol, sizeof(neg_pol), 1, fd);
    //more variables written here
    fclose(fd);
    g_printf("File saved to: %s\n", filename);
}

/* Load data from a stored configuration file */
void on_Load_File_Button_selection_changed()
{
    GtkFileChooser* file = GTK_FILE_CHOOSER(Load_File_Box_h);
    gchar* filename = gtk_file_chooser_get_filename(file);
    FILE* fd = fopen((const char*)filename, "r");
    if(!fd)
    {
        perror("Failed to open file\n");
        exit(EXIT_FAILURE);
    }
    fread(&gen, sizeof(gen), 1, fd);
    gtk_toggle_button_set_active(GTK_TOGGLE_BUTTON(GEN_CB_h), gen);
    fread(&neg_pol, sizeof(neg_pol), 1, fd);
    gtk_toggle_button_set_active(GTK_TOGGLE_BUTTON(Neg_Pol_CB_h), neg_pol);
    /* read more data and update more GUI elements here */
}

/* Callback for negative polarity bit checkbox */
void on_Neg_Pol_CB_toggled()
{
    GtkToggleButton* neg_pol_cb = GTK_TOGGLE_BUTTON(Neg_Pol_CB_h);
    neg_pol = (char)gtk_toggle_button_get_active(neg_pol_cb);
    printf("Neg pol toggled: %s\n", (neg_pol) ? "ON":"OFF");
}

/* Callback for global enable bit checkbox */
void on_GEN_CB_toggled()
{
    printf("gen_cb = %p\n", GEN_CB_h); //print before cast for debugging
    GtkToggleButton* gen_cb = GTK_TOGGLE_BUTTON(GEN_CB_h);
    gen = (char)gtk_toggle_button_get_active(gen_cb);
    printf("GEN toggled: %s\n", (gen) ? "ON":"OFF");
}

int main(int argc, char *argv[])
{
    GtkBuilder *builder;
    GtkWidget *window;

    gtk_init(&argc, &argv);

    builder = gtk_builder_new();
    gtk_builder_add_from_file (builder, "CFD.glade", NULL);

    window = GTK_WIDGET(gtk_builder_get_object(builder, "window_main"));
    GEN_CB_h = GTK_WIDGET(gtk_builder_get_object(builder, "GEN_CB"));
    Neg_Pol_CB_h = GTK_WIDGET(gtk_builder_get_object(builder, "Neg_Pol_CB"));
    Save_File_Box_h = GTK_WIDGET(gtk_builder_get_object(builder, "Save_File_Name_Box"));
    Load_File_Box_h = GTK_WIDGET(gtk_builder_get_object(builder, "Load_File_Button"));
    Save_File_Button_h = GTK_WIDGET(gtk_builder_get_object(builder, "Save_Config_Button"));
    //create more widgets here

    gen = 1;
    neg_pol = 0; //positive polarity
    //set more variables here

    gtk_builder_connect_signals(builder, NULL);
    printf("connect returned\n");
    g_object_unref(builder);
    printf("unref returned\n");
    gtk_widget_show(window);
    printf("show returned\n");
    gtk_main();

    return 0;
}
The issue I am having is that after loading a new configuration (i.e. triggering my Load_File_Button callback) my GEN_CB callback triggers a segfault on this line:
GtkToggleButton* gen_cb = GTK_TOGGLE_BUTTON(GEN_CB_h);
I used GDB to see what the cause of the segfault was and it reported it as:
Thread 1 "cfd_gui" received signal SIGSEGV, Segmentation fault.
0x00007ffff7395ea0 in g_type_check_instance_cast () from /usr/lib/libgobject-2.0.so.0
I also included a printf statement to check and see if anything was happening to the pointer between loads and found that the pointer value is being modified after this line is executed:
gtk_toggle_button_set_active(GTK_TOGGLE_BUTTON(GEN_CB_h), gen);
I am still new to GTK3 so I am probably using something wrong but have not been able to figure it out. Here are the outputs of my printf debug statements:
gen_cb = 0x55cd1aa04240
GEN toggled: OFF
Loaded configuration from file: /home/borabutt/github/CFD-RPI-Test/src/C/gui/test.dat
gen_cb = 0x55ea7bb68240
The first line shows the pointer to GEN_CB_h before configuration data is loaded. Then as can clearly be seen, the pointer value is changed after the configuration data is loaded. My investigations have shown this change occurs after setting the state of the checkbox as shown above.
I need to be able to update the state of many checkboxes and combo boxes to properly reflect the loaded state from the file. Right now the GUI works completely fine until I load data from a file. Any help and insight is appreciated.
EDIT:
I should note that I included the Neg_Pol_CB callback because this function does not cause a segfault after loading configuration data, while the GEN_CB callback does.
I got it solved. It turns out the code I was editing/building on my laptop was out of date with what was on my GitHub (the GitHub code is what I posted here). I thought I had pulled, but apparently not. I had a line of code where I was writing/reading the wrong data element size to the file:
fwrite(&gen, sizeof(&gen), 11, fd);
...
fread(&gen, sizeof(&gen), 11, fd);
Two mistakes: I was accidentally reading/writing sizeof(&gen) instead of sizeof(gen), and I was also reading/writing 11 data elements instead of one. This obviously caused me to overwrite some other pointers. I'll work on updating all of my callbacks to match the signatures, but it all seems to be working now.
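For clarity, the corrected calls (as already shown in the listing above) are simply:
/* sizeof(gen) is 1; sizeof(&gen) is the size of a pointer (8 on 64-bit), and
 * reading 11 elements instead of 1 spilled far past the variable, which is how
 * neighbouring globals such as the widget handles got overwritten. */
fwrite(&gen, sizeof(gen), 1, fd);
/* ... */
fread(&gen, sizeof(gen), 1, fd);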

ALSA equivalent to /dev/audio dump?

This will be my poorest question ever...
On an old netbook, I installed an even older version of Debian, and toyed around a bit. One of the rather pleasing results was a very basic MP3 player (using libmpg123), integrated for adding background music to a little application doing something completely different. I grew rather fond of this little solution.
In that program, I dumped the decoded audio (from mpg123_decode()) to /dev/audio via a simple fwrite().
This worked fine - on the netbook.
Now, I came to understand that /dev/audio was something done by OSS, and is no longer supported on newer (ALSA) machines. Sure enough, my laptop (running a current Linux Mint) does not have this device.
So apparently I have to use ALSA instead. Searching the web, I've found a couple of tutorials, and they pretty much blow my mind. Modes, parameters, capabilities, access type, sample format, sample rate, number of channels, number of periods, period size... I understand that ALSA is a powerful API for the ambitious, but that's not what I am looking for (or have the time to grok). All I am looking for is how to play the output of mpg123_decode (the format of which I don't even know, not being an audio geek by a long shot).
Can anybody give me some hints on what needs to be done?
tl;dr
How do I get ALSA to play raw audio data?
There's an OSS compatibility layer for ALSA in the alsa-oss package. Install it and run your program inside the "aoss" program. Or, modprobe the modules listed here:
http://wiki.debian.org/SoundFAQ/#line-105
Then, you'll need to change your program to use "/dev/dsp" or "/dev/dsp0" instead of "/dev/audio". It should work how you remembered... but you might want to cross your fingers just in case.
You could install sox and open a pipe to the play command with the correct samplerate and sample size arguments.
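A sketch of that idea, assuming the decoded audio is signed 16-bit PCM (adjust rate/bits/channels to whatever mpg123 reports for your stream):
#include <stdio.h>

/* Open a pipe to sox's `play` command; raw PCM written to the returned FILE*
 * is played back, much like the old fwrite() to /dev/audio. */
static FILE *open_play_pipe(long rate, int channels, int bits)
{
    char cmd[128];
    snprintf(cmd, sizeof(cmd),
             "play -q -t raw -r %ld -e signed-integer -b %d -c %d -",
             rate, bits, channels);
    return popen(cmd, "w");   /* caller fwrite()s PCM data, then pclose()s */
}
You would then fwrite() the decoded buffers to the returned FILE* and pclose() it when done.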
Using ALSA directly is overly complicated, so I hope a GStreamer solution is fine for you too. GStreamer gives a nice abstraction over ALSA/OSS/PulseAudio/you name it - and is ubiquitous in the Linux world.
I wrote a little library that will open a FILE object you can fwrite PCM data into:
Gstreamer file. The actual code is less than 100 lines.
You use it like this:
FILE *output = fopen_gst(rate, channels, bit_depth); // open audio output file
while (have_more_data) fwrite(data, amount, 1, output); // output audio data
fclose(output); // close the output file
I added an mpg123 example, too.
Here is the whole file (in case GitHub goes out of business ;-) ):
/**
 * gstreamer_file.c
 * Copyright 2012 René Kijewski <rene.SURNAME#fu-berlin.de>
 * License: LGPL 3.0 (http://www.gnu.org/licenses/lgpl-3.0)
 */

#include "gstreamer_file.h"

#include <stdbool.h>
#include <stdlib.h>
#include <unistd.h>

#include <glib.h>
#include <gst/gst.h>

#ifndef _GNU_SOURCE
# error "You need to add -D_GNU_SOURCE to the GCC parameters!"
#endif

/**
 * Cookie passed to the callbacks.
 */
typedef struct {
    /** { file descriptor to read from, fd to write to } */
    int pipefd[2];
    /** Gstreamer pipeline */
    GstElement *pipeline;
} cookie_t;

static ssize_t write_gst(void *cookie_, const char *buf, size_t size) {
    cookie_t *cookie = cookie_;
    return write(cookie->pipefd[1], buf, size);
}

static int close_gst(void *cookie_) {
    cookie_t *cookie = cookie_;
    gst_element_set_state(cookie->pipeline, GST_STATE_NULL); /* we are finished */
    gst_object_unref(GST_OBJECT(cookie->pipeline)); /* we won't access the pipeline anymore */
    close(cookie->pipefd[0]); /* we won't write anymore */
    close(cookie->pipefd[1]); /* we won't read anymore */
    free(cookie); /* dispose the cookie */
    return 0;
}

FILE *fopen_gst(long rate, int channels, int depth) {
    /* initialize Gstreamer */
    if (!gst_is_initialized()) {
        GError *error;
        if (!gst_init_check(NULL, NULL, &error)) {
            g_error_free(error);
            return NULL;
        }
    }

    /* get a cookie */
    cookie_t *cookie = malloc(sizeof(*cookie));
    if (!cookie) {
        return NULL;
    }

    /* open a pipe to be used between the caller and the Gstreamer pipeline */
    if (pipe(cookie->pipefd) != 0) {
        close(cookie->pipefd[0]);
        close(cookie->pipefd[1]);
        free(cookie);
        return NULL;
    }

    /* set up the pipeline */
    char description[256];
    snprintf(description, sizeof(description),
        "fdsrc fd=%d ! " /* read from a file descriptor */
        "audio/x-raw-int, rate=%ld, channels=%d, " /* get PCM data */
        "endianness=1234, width=%d, depth=%d, signed=true ! "
        "audioconvert ! audioresample ! " /* convert/resample if needed */
        "autoaudiosink", /* output to speakers (using ALSA, OSS, Pulseaudio ...) */
        cookie->pipefd[0], rate, channels, depth, depth);
    cookie->pipeline = gst_parse_launch_full(description, NULL,
        GST_PARSE_FLAG_FATAL_ERRORS, NULL);
    if (!cookie->pipeline) {
        close(cookie->pipefd[0]);
        close(cookie->pipefd[1]);
        free(cookie);
        return NULL;
    }

    /* open a FILE with specialized write and close functions */
    cookie_io_functions_t io_funcs = { NULL, write_gst, NULL, close_gst };
    FILE *result = fopencookie(cookie, "w", io_funcs);
    if (!result) {
        close_gst(cookie);
        return NULL;
    }

    /* start the pipeline (of cause it will wait for some data first) */
    gst_element_set_state(cookie->pipeline, GST_STATE_PLAYING);
    return result;
}
And ten years later, the "actual" answer is found: That's the wrong way to do it in the first place.
libmpg123 comes with a companion library, libout123, which abstracts the underlying audio system for you. Based on libmpg123 example code:
#include <stdlib.h>
#include "mpg123.h"
#include "out123.h"

int main()
{
    mpg123_handle * _mpg_handle;
    out123_handle * _out_handle;
    long rate;
    int channels, encoding;
    size_t position, buffer_size;
    unsigned char * buffer;
    char filename[] = "Example.mp3";

    mpg123_init();
    _mpg_handle = mpg123_new( NULL, NULL );
    _out_handle = out123_new();

    mpg123_open( _mpg_handle, filename );
    mpg123_getformat( _mpg_handle, &rate, &channels, &encoding );
    out123_open( _out_handle, NULL, NULL );

    /* Lock the decoder to the detected format and start the audio output. */
    mpg123_format_none( _mpg_handle );
    mpg123_format( _mpg_handle, rate, channels, encoding );
    out123_start( _out_handle, rate, channels, encoding );

    buffer_size = mpg123_outblock( _mpg_handle );
    buffer = malloc( buffer_size );

    do
    {
        mpg123_read( _mpg_handle, buffer, buffer_size, &position );
        out123_play( _out_handle, buffer, position );
    } while ( position );

    out123_close( _out_handle );
    mpg123_close( _mpg_handle );
    free( buffer );
}
