OpenGL uniform variable not working? - C

OK, I'm getting into OpenGL 2.1 (without the fixed-function stuff) and I'm having trouble with shaders. I declare my uniform variable in my shader, and in my program I get the uniform location and assign it a value with glUniform, but it doesn't seem to work.
These are my shaders
fragment shader:
#version 120
varying float color;
void
main ()
{
gl_FragColor = vec4(color, 0, 0, 1);
}
and my vertex shader:
#version 120
attribute vec2 position;
varying float color;
uniform float pr_color;
void
main ()
{
color = pr_color;
gl_Position = vec4(position, 0.0, 1.0);
}
This is how I'm passing the data to the shader:
void
display ()
{
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(g_program);
pos_loc = glGetAttribLocation(g_program, "position");
col_loc = glGetUniformLocation(g_program, "pr_color");
glUniform1f(col_loc, 1.0);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glEnableVertexAttribArray(pos_loc);
glVertexAttribPointer(pos_loc, 2, GL_FLOAT, GL_FALSE, 0, BUFFER_OFFSET(0));
glDrawArrays(GL_TRIANGLES, 0, 3);
glutPostRedisplay();
glutSwapBuffers();
}
And I don't think this is needed, but in any case, here is my GL init function:
void
init ()
{
// Set clear color to black
glClearColor(0.0,0.0,0.0,0.0);
vshader = createShader(GL_VERTEX_SHADER, "vertex.glsl");
fshader = createShader(GL_FRAGMENT_SHADER, "fragment.glsl");
g_program = createProgram(vshader, fshader);
// Create vbo and send it the vertex data
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
}
The thing is, my shaders are working: if I hardcode the color into the fragment shader, the triangle is drawn, but if I pass the color to the vertex shader it doesn't work; the triangle is drawn black, which is strange, because my position attribute works but my color uniform stays at 0. As you can see, I'm calling glUseProgram() before passing the uniform value.
EDIT: I changed the glUniform1i(col_loc, 1) to glUniform1f(col_loc, 1.0). Still doesn't work.
EDIT: I'll add my shader loading function to make sure the problem is not in the shader
GLuint
createShader (GLenum type, char* filename)
{
GLuint shader;
// Load file
FILE* file;
file = fopen(filename, "r");
if (file == NULL)
{
printf("Error reading file \n");
}
// Get Length
fseek(file, 0, SEEK_END);
long length = ftell(file);
fseek(file, 0, SEEK_SET);
// Get source
char* source;
source = malloc( (size_t) length + 1);
if (source == NULL)
{
printf("Error alocating space for shader\n");
}
// Read file
fread(source, 1, length, file);
// Close file
fclose(file);
source[length] = '\0'; // null-terminate the shader source
// Create shader, attach its source and compile it
shader = glCreateShader(type);
glShaderSource(shader, 1, (const GLchar **)&source, NULL);
free(source); // Free shader source, once it's attached
glCompileShader(shader);
// Check for errors
GLint shader_status;
glGetShaderiv(shader, GL_COMPILE_STATUS, &shader_status);
if (!shader_status) {
fprintf(stderr, "Failed to compile %s:\n", filename);
GLint log_length;
char *log;
glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &log_length);
log = malloc(log_length);
glGetShaderInfoLog(shader, log_length, NULL, log);
fprintf(stderr, "%s", log);
free(log);
glDeleteShader(shader);
return 0;
}
return shader;
}
When there's an error in the shader, the program actually prints the error log, so I don't think the error is in here, but anyway, here it is.
EDIT: program linker code
GLuint
createProgram (GLuint vertexs, GLuint fragments)
{
GLint program_ok;
// Create program and attach to shaders
GLuint program = glCreateProgram();
glAttachShader(program, vertexs);
glAttachShader(program, fragments);
glLinkProgram(program);
glGetProgramiv(program, GL_LINK_STATUS, &program_ok);
if (!program_ok) {
fprintf(stderr, "Failed to link shader program:\n");
GLint log_length;
char *log;
glGetProgramiv(program, GL_INFO_LOG_LENGTH, &log_length);
log = malloc(log_length);
glGetProgramInfoLog(program, log_length, NULL, log);
fprintf(stderr, "%s", log);
free(log);
glDeleteProgram(program);
return 0;
}
return program;
}
Please tell me if there's anything wrong with my shaders/program. I was just starting to get the basics of the programmable pipeline, and now I can't even render a triangle.

You are using
glUniform1i(col_loc, 1);
to set the value of
uniform float pr_color;
The 1i suffix stands for 1 int. Use 1f for 1 float:
glUniform1f(col_loc, 1.0f);
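A related thing worth checking when a uniform seems stuck at zero (not part of the answer above, just an aside): glGetUniformLocation returns -1 for a name that is not an active uniform (misspelled or optimized out), and glUniform* calls with location -1 are silently ignored. Reusing the variable names from the question, a minimal check could look like this:
glUseProgram(g_program);                 /* glUniform* affects the program currently in use */
col_loc = glGetUniformLocation(g_program, "pr_color");
if (col_loc == -1)
    fprintf(stderr, "uniform 'pr_color' is not active (wrong name or optimized out)\n");
glUniform1f(col_loc, 1.0f);              /* setter type must match the GLSL type (float) */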

OK, I got it working with glUniform4fv(). I am now passing a vec4 to the shader's color variable and it works!
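For reference, the vec4 route described above pairs a vec4 uniform in GLSL with glUniform4fv on the C side. A small sketch, assuming the uniform was changed to a vec4 still named pr_color and reusing the question's g_program handle:
/* GLSL: uniform vec4 pr_color; */
GLfloat red[4] = { 1.0f, 0.0f, 0.0f, 1.0f };
glUseProgram(g_program);
glUniform4fv(glGetUniformLocation(g_program, "pr_color"), 1, red);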

Related

Why is my triangle white in OpenGL ES 3 on Raspberry Pi

I have a very simple example of an OpenGL ES program that I'm trying to get to run on RaspiOS Desktop (a.k.a. Raspbian) on a Raspberry Pi 4.
My goal is very simple: to draw a red triangle in the center of the screen. However, the triangle comes out as white instead of red.
I've searched and tried everything and wasn't able to find any help. I'm very frustrated at this point, because this was just supposed to be the first tutorial to introduce me to the world of OpenGL ES, and I'm already stuck and can't continue with more complicated examples.
Anyway, here's the full example:
#include <GL/glew.h>
#include <GL/freeglut.h>
#include <stdio.h>
#include <stdbool.h>
static struct glData {
GLuint program;
GLuint vbo;
} glData;
const char vert_shader_source[] = "#version 300 es \n"
"precision mediump float; \n"
"layout (location = 0) in vec3 Position; \n"
"void main() \n"
"{ \n"
" gl_Position = vec4(Position, 1.0); \n"
"} \n";
const char frag_shader_source[] = "#version 300 es \n"
"precision mediump float; \n"
"out vec4 fragColor; \n"
"void main() \n"
"{ \n"
" fragColor = vec4(1.0f, 0.0f, 0.0f, 1.0f); \n"
"} \n";
#define POSITION 0
bool initWindow(int* argc, char** argv)
{
glutInit(argc, argv);
glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGB | GLUT_MULTISAMPLE);
glutCreateWindow("Triangle");
GLenum glew_status = glewInit();
if (glew_status != GLEW_OK) {
fprintf(stderr, "Error: %s\n", glewGetErrorString(glew_status));
return false;
}
return true;
}
static GLuint buildShader(const char* shader_source, GLenum type)
{
GLuint shader;
GLint status;
shader = glCreateShader(type);
if (shader == 0) {
return 0;
}
glShaderSource(shader, 1, &shader_source, NULL);
glCompileShader(shader);
glGetShaderiv(shader, GL_COMPILE_STATUS, &status);
if (status != GL_TRUE) {
int length;
char* log;
fprintf(stderr, "failed to compile shader\n");
glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &length);
if (length > 1) {
log = calloc(length, sizeof(char));
glGetShaderInfoLog(shader, length, &length, log);
fprintf(stderr, "%s\n", log);
free(log);
}
return false;
}
return true;
}
static GLuint createAndLinkProgram(GLuint v_shader, GLuint f_shader)
{
GLuint program;
GLint linked;
program = glCreateProgram();
if (program == 0) {
fprintf(stderr, "failed to create program\n");
return 0;
}
glAttachShader(program, v_shader);
glAttachShader(program, f_shader);
glLinkProgram(program);
glGetProgramiv(program, GL_LINK_STATUS, &linked);
if (linked != GL_TRUE) {
int length;
char* log;
fprintf(stderr, "failed to link program\n");
glGetProgramiv(program, GL_INFO_LOG_LENGTH, &length);
if (length > 1) {
log = calloc(length, sizeof(char));
glGetProgramInfoLog(program, length, &length, log);
fprintf(stderr, "%s\n", log);
free(log);
}
glDeleteProgram(program);
return 0;
}
return program;
}
static bool initProgram()
{
GLuint v_shader, f_shader;
v_shader = buildShader(vert_shader_source, GL_VERTEX_SHADER);
if (v_shader == 0) {
fprintf(stderr, "failed to build vertex shader\n");
return false;
}
f_shader = buildShader(frag_shader_source, GL_FRAGMENT_SHADER);
if (f_shader == 0) {
fprintf(stderr, "failed to build fragment shader\n");
glDeleteShader(v_shader);
return false;
}
glReleaseShaderCompiler(); // should release resources allocated for the compiler
glData.program = createAndLinkProgram(v_shader, f_shader);
if (glData.program == 0) {
fprintf(stderr, "failed to create and link program\n");
glDeleteShader(v_shader);
glDeleteShader(f_shader);
return false;
}
glUseProgram(glData.program);
// this won't actually delete the shaders until they are detached or the program is deleted, but it's good practice
glDeleteShader(v_shader);
glDeleteShader(f_shader);
return true;
}
bool setupOpenGL()
{
if (!initProgram()) {
fprintf(stderr, "failed to initialize program\n");
return false;
}
GLfloat vVertices[] = {
-0.5f, -0.5f, 0.0f,
0.0f, 0.5f, 0.0f,
0.5f, -0.5f, 0.0f,
};
glClearColor(0, 0, 0, 1);
glGenBuffers(1, &glData.vbo);
glBindBuffer(GL_ARRAY_BUFFER, glData.vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vVertices), vVertices, GL_STATIC_DRAW);
return true;
}
void reshape(int width, int height)
{
glViewport(0, 0, width, height);
}
void drawTriangle()
{
glClear(GL_COLOR_BUFFER_BIT);
glEnableVertexAttribArray(POSITION);
glBindBuffer(GL_ARRAY_BUFFER, glData.vbo);
glVertexAttribPointer(POSITION, 3, GL_FLOAT, GL_FALSE, 0, 0);
glDrawArrays(GL_TRIANGLES, 0, 3);
glDisableVertexAttribArray(POSITION);
glutSwapBuffers();
}
int main(int argc, char** argv)
{
printf("initialize window\n");
if (!initWindow(&argc, argv)) {
fprintf(stderr, "failed to initialize window\n");
return EXIT_FAILURE;
}
printf("setup opengl\n");
if (!setupOpenGL()) {
fprintf(stderr, "failed to setup opengl\n");
return EXIT_FAILURE;
}
glutDisplayFunc(drawTriangle);
glutReshapeFunc(reshape);
glutMainLoop();
glDeleteProgram(glData.program);
return EXIT_SUCCESS;
}
Before you run it, you need to:
Run sudo raspi-config
Go to Advanced Options > GL Driver
Enable GL (Fake KMS)
Reboot
Then you can compile and run it like this:
gcc triangle.c -Wall -lm -lglut -lGLEW -lGL -o triangle
./triangle
At first I thought maybe it's some bug in the driver or something. But then I found this example and tried to run it and it draws some graphs with multiple colors and it's fine.
I'd appreciate any help. I've been trying to debug this for days now.
Nvm, turns out I'm an idiot.
This entire time it was a simple typo in the buildShader() function. Here's the fixed version of that function:
static GLuint buildShader(const char* shader_source, GLenum type)
{
GLuint shader;
GLint status;
shader = glCreateShader(type);
if (shader == 0) {
return 0;
}
glShaderSource(shader, 1, &shader_source, NULL);
glCompileShader(shader);
glGetShaderiv(shader, GL_COMPILE_STATUS, &status);
if (status != GL_TRUE) {
int length;
char* log;
fprintf(stderr, "failed to compile shader\n");
glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &length);
if (length > 1) {
log = calloc(length, sizeof(char));
glGetShaderInfoLog(shader, length, &length, log);
fprintf(stderr, "%s\n", log);
free(log);
}
return 0;
}
return shader;
}
The problem was that I was accidentally returning true/false instead of the shader object. I have absolutely no idea how the program still managed to run without an error and display a white triangle, but that's how it is.
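A suggestion beyond the original post: this kind of mistake can be caught earlier by polling glGetError after the attach and link calls, since glAttachShader raises GL_INVALID_VALUE or GL_INVALID_OPERATION when it is handed something that is not a valid shader object, or a shader that is already attached. A minimal sketch:
static void checkGLError(const char* where)
{
    GLenum err;
    while ((err = glGetError()) != GL_NO_ERROR)
        fprintf(stderr, "GL error 0x%x after %s\n", (unsigned)err, where);
}

/* usage in createAndLinkProgram */
glAttachShader(program, v_shader);
checkGLError("glAttachShader(vertex)");
glAttachShader(program, f_shader);
checkGLError("glAttachShader(fragment)");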

"syntax error, unexpected $end" when compiling GLSL programs?

I'm having trouble compiling shaders. I'm pretty sure the issue is with my file loading code.
These are the relevant functions:
char* loadShader(char* filename){
char* buffer = 0;
long length;
FILE* f = fopen (filename, "rb");
if(!f){
// error reading file
printf("failed to open file\n");
}
if (f){
fseek (f, 0, SEEK_END);
length = ftell (f);
fseek (f, 0, SEEK_SET);
buffer = malloc (length+1);
if (buffer){
fread (buffer, 1, length, f);
}
fclose (f);
}
return buffer;
}
Renderer* initRenderer(){
Renderer* renderer = malloc(sizeof(Renderer));
//renderer->vertices[0] = -0.5f;
float verts[] = {
-0.5f, -0.5f, 0.0f,
0.5f, -0.5f, 0.0f,
0.0f, 0.5f, 0.0f
};
for(int i = 0; i < 9; i++){
renderer->vertices[i] = verts[i];
}
glGenBuffers(1, &renderer->VBO);
glBindBuffer(GL_ARRAY_BUFFER, renderer->VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(renderer->vertices), renderer->vertices, GL_STATIC_DRAW);
GLchar* vst = (GLchar*)loadShader("game_data/assets/shaders/simple_vert.glsl");
GLchar* fst = (GLchar*)loadShader("game_data/assets/shaders/simple_frag.glsl");
const GLchar* vertexShaderSource = vst;
const GLchar* fragmentShaderSource = fst;
if(vst){free(vst);}
if(fst){free(fst);}
int success;
char infoLog[512];
unsigned int vertexShader;
vertexShader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertexShader, 1, &vertexShaderSource, NULL);
glCompileShader(vertexShader);
glGetShaderiv(vertexShader, GL_COMPILE_STATUS, &success);
if(!success)
{
glGetShaderInfoLog(vertexShader, 512, NULL, infoLog);
printf("ERROR::SHADER::VERTEX::COMPILATION_FAILED\n");
printf("%s\n", infoLog);
//std::cout << "ERROR::SHADER::VERTEX::COMPILATION_FAILED\n" << infoLog << std::endl;
}
unsigned int fragmentShader;
fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragmentShader, 1, &fragmentShaderSource, NULL);
glCompileShader(fragmentShader);
glGetShaderiv(fragmentShader, GL_COMPILE_STATUS, &success);
if(!success)
{
glGetShaderInfoLog(fragmentShader, 512, NULL, infoLog);
printf("ERROR::SHADER::FRAGMENT::COMPILATION_FAILED\n");
printf("%s\n", infoLog);
//std::cout << "ERROR::SHADER::FRAGMENT::COMPILATION_FAILED\n" << infoLog << std::endl;
}
return renderer;
}
The vertex shader:
#version 330 core
layout (location = 0) in vec3 aPos;
void main()
{
gl_Position = vec4(aPos.x, aPos.y, aPos.z, 1.0);
}
The fragment shader:
#version 330 core
out vec4 FragColor;
void main()
{
FragColor = vec4(1.0f, 0.5f, 0.2f, 1.0f);
}
And finally the error message:
ERROR::SHADER::VERTEX::COMPILATION_FAILED
0:1(1): error: syntax error, unexpected $end
ERROR::SHADER::FRAGMENT::COMPILATION_FAILED
0:1(1): error: syntax error, unexpected $end
I'm sure I'm missing something simple with the strings. I looked at this answer with a similar problem but I wasn't able to glean anything from it.
I'm not an expert in file opening, but you're opening it in rb (read binary) mode; maybe try just opening it in r (read) mode.
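For what it's worth (not from the answer above): two other common causes of "0:1(1): error: syntax error, unexpected $end" are visible in the posted code. The loaded source string is never null-terminated (glShaderSource with a NULL length argument expects C strings), and vst/fst are freed before glShaderSource is called, so the pointers handed to GL point at freed memory. A loadShader sketch that terminates the buffer, plus freeing only after the source has been handed over:
char* loadShader(const char* filename){
    FILE* f = fopen(filename, "rb");
    if (!f){
        printf("failed to open %s\n", filename);
        return NULL;
    }
    fseek(f, 0, SEEK_END);
    long length = ftell(f);
    fseek(f, 0, SEEK_SET);
    char* buffer = malloc(length + 1);
    if (buffer){
        fread(buffer, 1, length, f);
        buffer[length] = '\0';   /* glShaderSource with a NULL length expects a C string */
    }
    fclose(f);
    return buffer;
}

/* and in initRenderer: free the strings only after glShaderSource has copied them */
glShaderSource(vertexShader, 1, &vertexShaderSource, NULL);
free(vst);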

Can't change background color in OpenGL

I've just started to experiment with OpenGL (using freeglut and GLEW). I can get a window to pop up, but nothing gets drawn to it; I can't even get it to change the background color.
Here's what the main function looks like:
int
main(int argc, char **argv)
{
GLenum err;
/* Initialize GLUT library */
glutInit(&argc, argv);
/* Set window mode */
glutInitDisplayMode(GLUT_RGBA | GLUT_DOUBLE);
/* Choose OpenGL version */
glutInitContextVersion(3, 3);
/* Choose OpenGL profile */
glutInitContextFlags(GLUT_CORE_PROFILE);
/* Create a GLUT window */
glutCreateWindow("OpenGL 2D");
/* Initialize GLEW library */
glewExperimental = GL_TRUE;
err = glewInit();
if(err != GLEW_OK) {
printf("Cannot initialize GLEW: %s\n",
glewGetErrorString(err));
return 1;
}
/* WORKAROUND: Ignore all GLEW errors */
while(glGetError() != GL_NO_ERROR)
;
(void) printf("OpenGL vendor: \"%s\"\n"
" renderer: \"%s\"\n"
" version: \"%s\"\n"
" SL version: \"%s\"\n",
glGetString(GL_VENDOR),
glGetString(GL_RENDERER),
glGetString(GL_VERSION),
glGetString(GL_SHADING_LANGUAGE_VERSION));
/* Set up callback function for "reshape" event */
printf("Setting callback functions\n");
glutReshapeFunc(canvas_reshape);
/* Set up callback function for "display" event */
glutDisplayFunc(canvas_display);
/* Set up callback function for "keyboard" event */
glutKeyboardFunc(canvas_keyboard);
/* Set up callback function for "timer" event */
glutTimerFunc(30, canvas_timer, 0);
/* Initialize OpenGL */
init();
/* Choose the window's position */
glutPositionWindow(100, 100);
/* Choose the window's size */
glutReshapeWindow(800, 600);
/* Start main loop */
printf("Entering main loop\n");
glutMainLoop();
return 0;
}
Here's what init looks like:
init()
{
const GLchar *vertex_shader_source[] = {
"#version 330 core\n",
"\n",
"layout(location=0) in vec4 position;\n",
"out vec4 color;\n",
"uniform mat4 viewtrans;\n",
"\n",
"void\n",
"main()\n",
"{\n",
" gl_Position = viewtrans * position;\n"
" color = vec4(1.0, 1.0, 1.0, 1.0);\n"
"}\n" };
const GLchar *fragment_shader_source[] = {
"#version 330 core\n",
"in vec4 color;\n",
"layout(location=0) out vec4 fcolor;\n",
"void\n",
"main()\n",
"{\n",
" fcolor = color;\n",
"}\n" };
char compiler_log[LOGSIZE];
/* Obtain an unused name for our vertex array */
printf("Creating vertex array\n");
glGenVertexArrays(1, &vertex_array);
check_error("glGenVertexArrays");
/* Tell OpenGL to use the new vertex array */
glBindVertexArray(vertex_array);
check_error("glBindVertexArray");
/* Obtain an unused name for our vertex buffer */
printf("Creating vertex buffer\n");
glGenBuffers(1, &vertex_buffer);
check_error("glGenBuffers");
/* Tell OpenGL to use the new vertex buffer as the
* vertex array */
glBindBuffer(GL_ARRAY_BUFFER, vertex_buffer);
check_error("glBindBuffer");
/* We want to get two coordinates from the vertex buffer and
* feed them as the first parameter to the vertex shader */
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(GLfloat) * 2, 0);
/* Create vertex shader */
printf("Creating vertex shader\n");
vertex_shader = glCreateShader(GL_VERTEX_SHADER);
check_error("glCreateShader");
/* Assign vertex shader source code */
glShaderSource(vertex_shader, sizeof(vertex_shader_source) / sizeof(GLchar *),
vertex_shader_source, 0);
check_error("glShaderSource");
/* Compile vertex shader */
glCompileShader(vertex_shader);
check_error("glCompileShader");
/* Get compiler log */
glGetShaderInfoLog(vertex_shader, LOGSIZE, 0, compiler_log);
printf(" Compiler log: \"%s\"\n", compiler_log);
/* Create fragment shader */
printf("Creating fragment shader\n");
fragment_shader = glCreateShader(GL_FRAGMENT_SHADER);
check_error("glCreateShader");
/* Assign fragment shader source code */
glShaderSource(fragment_shader, sizeof(fragment_shader_source) / sizeof(GLchar *),
fragment_shader_source, 0);
check_error("glShaderSource");
/* Compile fragment shader */
glCompileShader(fragment_shader);
check_error("glCompileShader");
/* Get compiler log */
glGetShaderInfoLog(fragment_shader, LOGSIZE, 0, compiler_log);
printf(" Compiler log: \"%s\"\n", compiler_log);
/* Create shader program */
printf("Creating shader program\n");
shader_program = glCreateProgram();
check_error("glCreateProgram");
/* Attach vertex shader */
glAttachShader(shader_program, vertex_shader);
check_error("glAttachShader");
/* Attach fragment shader */
glAttachShader(shader_program, fragment_shader);
check_error("glAttachShader");
/* Link shader program */
glLinkProgram(shader_program);
check_error("glLinkProgram");
/* Get linker log */
glGetProgramInfoLog(shader_program, LOGSIZE, 0, compiler_log);
printf(" Linker log: \"%s\"\n", compiler_log);
/* Get location of "viewtrans" matrix */
viewtrans = glGetUniformLocation(shader_program, "viewtrans");
check_error("glGetUniformLocation");
/* Tell OpenGL to use the new shader program */
glUseProgram(shader_program);
/* Choose background color */
glClearColor(1.0, 0.0, 1.0, 0.0);
}
Here's what I use to draw:
static void
canvas_display()
{
glClearColor(0.0, 0.0, 0.0, 0.0);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
}
This is all based on a tutorial. What actually happens is that a roughly 300 x 300 white window shows up and gets reshaped into an 800 x 600 black window, but with a 300 x 300 white square still in the middle or in a corner; the square's pixels (partially) turn black when the window is resized so that (part of) the square disappears, either manually or through code. I've also written code to draw triangles, but that doesn't work either, as one might expect. But here's the code for that anyway:
static void
canvas_display()
{
glClearColor(0.0, 0.0, 0.0, 0.0);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
GLfloat vertices[] =
{ 0.0, 0.0,
0.5, 0.5,
0.5, 0.0 };
glGenVertexArrays(1, &vertex_array);
glBindVertexArray(vertex_array);
glGenBuffers(1, &vertex_buffer);
glBindBuffer(GL_ARRAY_BUFFER, vertex_buffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(GL_FLOAT) * 2, vertices);
glDrawArrays(GL_TRIANGLES, 0, 3);
}
Do not continuously create new vertex array objects. Create one Vertex Array Object at initialization; the vertex data itself is stored in the Vertex Buffer Object. When a named buffer object is bound to the GL_ARRAY_BUFFER target, the last parameter of glVertexAttribPointer is treated as a byte offset into that buffer object's data store, so the offset has to be NULL:
void init()
{
// [...]
GLfloat vertices[] =
{ 0.0, 0.0,
0.5, 0.5,
0.5, 0.0 };
glGenVertexArrays(1, &vertex_array);
glBindVertexArray(vertex_array);
glGenBuffers(1, &vertex_buffer);
glBindBuffer(GL_ARRAY_BUFFER, vertex_buffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(GLfloat) * 2, NULL);
}
Ensure that the shader program is installed and the VAO is bound when the geometry is drawn. By default the values of uniforms are initialized to zero, so at the very least you have to set the identity matrix on the matrix uniform viewtrans.
Since you use double buffering (GLUT_DOUBLE), you have to swap the buffers of the current window after drawing all the geometry, by calling glutSwapBuffers:
void canvas_display()
{
glClearColor(1.0, 0.0, 0.0, 0.0);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram(shader_program);
GLfloat identity_mat[] = { 1,0,0,0, 0,1,0,0, 0,0,1,0, 0,0,0,1 };
glUniformMatrix4fv(viewtrans, 1, GL_FALSE, identity_mat);
glBindVertexArray(vertex_array);
glDrawArrays(GL_TRIANGLES, 0, 3);
glutSwapBuffers();
glutPostRedisplay();
}

OpenGL compute shader not changing buffer

In the compute shader, I want to change the contents of a buffer that I share between multiple shaders, specifically the normals in this case.
No matter what I set the value of one or more vertices in the buffer to, it doesn't seem to take effect in any of the other shaders. Probably I just forgot something or am not seeing something (I am new to compute shaders).
Here is the relevant part of the code. If you need more parts of the code, just ask. Please understand, though, that it's hard to give a working piece of code, because that requires quite a bit more code than just a few lines.
utils_createComputeProgram(&g_simulateWaterProgram, "content/shaders/simulatewater/simulatewater.comp");
// Initialize first position buffer
{
glGenBuffers(1, &app->positionBufferOne);
glBindBuffer(GL_ARRAY_BUFFER, app->positionBufferOne);
glBufferData(GL_ARRAY_BUFFER, NUM_VERTICES*sizeof(MLvec4), NULL, GL_STATIC_DRAW);
MLvec4* positions = glMapBuffer(GL_ARRAY_BUFFER, GL_READ_WRITE);
for (int i = 0; i < NUM_VERTICES; ++i) {
...
positions[i] = mlMakeVec4(x, y, z, v);
}
glUnmapBuffer(GL_ARRAY_BUFFER);
glBindBuffer(GL_ARRAY_BUFFER, 0);
}
// Initialize second position buffer with 0
{
glGenBuffers(1, &app->positionBufferTwo);
glBindBuffer(GL_ARRAY_BUFFER, app->positionBufferTwo);
glBufferData(GL_ARRAY_BUFFER, NUM_VERTICES*sizeof(MLvec4), NULL, GL_STATIC_DRAW);
MLvec4* positions = glMapBuffer(GL_ARRAY_BUFFER, GL_READ_WRITE);
for (int i = 0; i < NUM_VERTICES; ++i) {
positions[i] = mlMakeVec4(0, 0, 0, 0);
}
glUnmapBuffer(GL_ARRAY_BUFFER);
glBindBuffer(GL_ARRAY_BUFFER, 0);
}
// initialize normal buffer
{
glGenBuffers(1, &app->normalBuffer);
glBindBuffer(GL_ARRAY_BUFFER, app->normalBuffer);
glBufferData(GL_ARRAY_BUFFER, NUM_VERTICES*sizeof(MLvec4), NULL, GL_STATIC_DRAW);
MLvec4* normals = glMapBuffer(GL_ARRAY_BUFFER, GL_READ_WRITE);
for (int i = 0; i < NUM_VERTICES; ++i) {
normals[i] = mlMakeVec4(0, 0, 1, 0);
}
glUnmapBuffer(GL_ARRAY_BUFFER);
glBindBuffer(GL_ARRAY_BUFFER, 0);
}
// Attach buffers to program
{
glBindBufferBase(GL_SHADER_STORAGE_BUFFER, 0, app->positionBufferOne);
glBindBufferBase(GL_SHADER_STORAGE_BUFFER, 1, app->positionBufferTwo);
glBindBufferBase(GL_SHADER_STORAGE_BUFFER, 2, app->normalBuffer);
}
utils_createProgramVertexFragment(&g_renderWaterProgram, "content/shaders/renderwater/renderwater.vert", "content/shaders/renderwater/renderwater.frag");
// initialize VAO
{
glGenVertexArrays(1, &g_waterVertexArrayObject);
utils_setAttributePointer(g_waterVertexArrayObject, app->positionBufferOne, 0, 4, GL_FLOAT, GL_FALSE, 0, 0);
utils_setAttributePointer(g_waterVertexArrayObject, app->positionBufferTwo, 1, 4, GL_FLOAT, GL_FALSE, 0, 0);
utils_setAttributePointer(g_waterVertexArrayObject, app->normalBuffer, 2, 4, GL_FLOAT, GL_FALSE, 0, 0);
}
// initialize index array
{
...
glBindVertexArray(g_waterVertexArrayObject);
glGenBuffers(1, &g_elementArrayBuffer);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, g_elementArrayBuffer);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(GLuint) * NUM_INDICES, g_waterIndices, GL_STATIC_DRAW);
}
void rendering_render(Application* app, int framebufferWidth, int framebufferHeight) {
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glViewport(0, 0, framebufferWidth, framebufferHeight);
MLvec3 center = mlMakeVec3(app->camera.cx, app->camera.cy, app->camera.cz);
MLvec3 eye = positionOfCamera(app->camera);
MLmat4 modelMatrix = mlIdentity4;
MLmat4 projectionMatrix = mlMakePerspectiveFromFOV(100, (float)framebufferWidth / framebufferHeight, 0.01, 100.0);
MLmat4 viewMatrix = mlMakeLookAt(eye, center, mlMakeVec3(0, 1, 0));
// We have to wait here until the compute shader has finished all write operations on the shader storage buffers.
//glMemoryBarrier(GL_SHADER_STORAGE_BARRIER_BIT);
glMemoryBarrier(GL_VERTEX_ATTRIB_ARRAY_BARRIER_BIT);
glUseProgram(g_renderWaterProgram);
utils_setUniformMat4(renderWaterProgram_ProjectionMatrix, &projectionMatrix);
utils_setUniformMat4(renderWaterProgram_ViewMatrix, &viewMatrix);
utils_setUniformMat4(renderWaterProgram_ModelMatrix, &modelMatrix);
if (app->debug) {
glDrawElements(GL_POINTS, NUM_INDICES, GL_UNSIGNED_INT, NULL);
}
else {
glDrawElements(GL_TRIANGLES, NUM_INDICES, GL_UNSIGNED_INT, NULL);
}
glUseProgram(0);
}
Compute shader:
layout (local_size_x = 1000, local_size_y = 1, local_size_z = 1) in;
layout (location = 0) uniform float Dt;
layout (std430, binding = 0) buffer PositionBufferOne {
vec4 positions[];
};
layout (std430, binding = 1) buffer PositionBufferTwo {
vec4 positionsNew[];
};
layout (std430, binding = 2) buffer NormalBuffer {
vec4 normals[];
};
vec3 calcNormal() {
return normalize(vec3(1, 1, 1));
}
void main() {
uint index = gl_GlobalInvocationID.x;
normals[index] = vec4(calcNormal(), 0.0);
}
utils:
GLuint utils_createShader(GLenum shaderType, const char* filename) {
GLuint shader = glCreateShader(shaderType);
const char* sources[2];
sources[0] = common_readfile("content/shaders/utils.glsl");
sources[1] = common_readfile(filename);
glShaderSource(shader, 2, sources, NULL);
free((void*)sources[0]);
free((void*)sources[1]);
glCompileShader(shader);
utils_checkShaderLog(filename, shader);
return shader;
}
void utils_createComputeProgram(GLuint *program, const char* computeShaderFilename) {
glDeleteProgram(*program);
GLuint computeShader = utils_createShader(GL_COMPUTE_SHADER, computeShaderFilename);
*program = glCreateProgram();
glAttachShader(*program, computeShader);
glLinkProgram(*program);
utils_checkProgramLog(*program);
glDeleteShader(computeShader);
}
void utils_setAttributePointer(GLuint vertexArrayObject, GLuint buffer, GLint location, GLint size, GLenum type, GLboolean normalized, GLsizei stride, unsigned offset) {
GLint previousVertexArrayObject;
glGetIntegerv(GL_VERTEX_ARRAY_BINDING, &previousVertexArrayObject);
GLint previousArrayBuffer;
glGetIntegerv(GL_ARRAY_BUFFER_BINDING, &previousArrayBuffer);
glBindVertexArray(vertexArrayObject);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glEnableVertexAttribArray(location);
glVertexAttribPointer(location, size, type, normalized, stride, (void*)(intptr_t)offset);
glBindBuffer(GL_ARRAY_BUFFER, previousArrayBuffer);
glBindVertexArray(previousVertexArrayObject);
}
void utils_setUniformMat4(GLuint location, MLmat4* m) {
glUniformMatrix4fv(location, 1, GL_FALSE, (GLfloat*)m);
}
I tested the changes by using the normals as the colors for my fragments.
In this example I set the normals to 0/0/0 during initialization, and in the compute shader I change the value of every normal to normalized 1/1/1, so I expect to see the geometry in a somewhat greyish color, but it stays black.
EDIT: After starting from scratch and testing the result after basically every line of code I added, I found out that the problem was this line: glDispatchCompute(NUM_VERTICES / NUM_PARTICLES_PER_LOCAL_WORK_GROUP, 1, 1); I guess this evaluated to zero, which kind of makes sense with a "low" number of vertices and a relatively high number of particles per work group.
That meant that whatever I did in the compute shader was never actually executed, and thus the buffer content never changed.
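A sketch of a dispatch that avoids the zero-group case, assuming the NUM_VERTICES and NUM_PARTICLES_PER_LOCAL_WORK_GROUP constants mentioned above: round the group count up instead of letting integer division truncate to zero.
/* round up so at least one work group runs even when
 * NUM_VERTICES < NUM_PARTICLES_PER_LOCAL_WORK_GROUP */
GLuint numGroups = (NUM_VERTICES + NUM_PARTICLES_PER_LOCAL_WORK_GROUP - 1)
                 / NUM_PARTICLES_PER_LOCAL_WORK_GROUP;
glUseProgram(g_simulateWaterProgram);
glDispatchCompute(numGroups, 1, 1);
/* make the SSBO writes visible to the vertex attribute fetch before drawing */
glMemoryBarrier(GL_VERTEX_ATTRIB_ARRAY_BARRIER_BIT);
With the rounded-up count, the compute shader should also bail out early when gl_GlobalInvocationID.x >= NUM_VERTICES, because the last work group may contain extra invocations.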

OpenGL SOIL texture

Here is the error message I am receiving from VS2010:
Unhandled exception at 0x77358dc9 in AzimuthalEqui.exe:
0xC0000005: Access violation writing location 0x00000014.
The line of code generating this error is:
texture[1] = SOIL_load_OGL_texture
(
texture_filename,
SOIL_LOAD_AUTO,
SOIL_CREATE_NEW_ID, // create ID automatically
SOIL_flags
);
before entering this function:
texture_filename = "Data/texture.jpg"
SOIL_LOAD_AUTO (= 0)
SOIL_CREATE_NEW_ID (=0)
SOIL_flags = 16 (=SOIL_FLAG_INVERT_Y)
The problem is that when I include a file I have written to parse some information in a text file, an error is generated; otherwise the texture loads and is displayed. The problem is most likely caused by one function, since when this particular function is removed the code loads and displays the texture.
Here is the function that, when added to my project, makes the error occur:
void get_user_points(double *lats, double *longs){
char buffer[BUFFSIZE_PARSE];
char *p_buff = buffer;
FILE *fp;
const char *filename = "Points.txt";
double temp;
double *tmp_p = &temp;
fp = fopen(filename,"r+");
if (fp == NULL)
{
sprintf(buffer, "Can't Find File: %s", filename);
MessageBoxA(NULL, buffer, "ERROR", MB_OK|MB_ICONEXCLAMATION);
exit(0);
}
fgets(buffer, BUFFSIZE_PARSE, fp);
while (*(p_buff+1) != '\0'){
p_buff = get_next_letter(p_buff);
switch (tolower(*p_buff)){
case 'n':
putchar(*p_buff);
p_buff++;
p_buff=get_next_double(lats, p_buff);
printf(" = %f\n", *lats);
break;
case 's':
...
...
}
}
putchar('\n');
fclose(fp);
}
It has something to do with opening the file for reading... if I comment out these lines the texture loads correctly:
//fpr = fopen(filename2,"rb");
...
...
...
//fclose(fpr);
Here is my reduced code (I don't think this is the issue, but just in case). [There may be some remnants of my full code]:
int main(int argc, char** argv)
{
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB);
{
printf("Initialising...\n");
glutInitWindowSize(700, 700);
glutInitWindowPosition(0, 0);
glutCreateWindow ("SOIL Texture Test");
}
InitGL();
glutDisplayFunc(DrawGLScene);
glutReshapeFunc(ReSizeGLScene);
glutMainLoop();
return 0;
}
int InitGL(GLvoid) // Setup OpenGL
{
//Load textures
if (!LoadGLTextures()) // Jump To Texture Loading Routine ( NEW )
{
MessageBox(NULL,TEXT("Cannot Load Image for Texture Map"),TEXT("Error!"),MB_OK | MB_ICONINFORMATION);
//return false; // If Texture Didn't Load Return FALSE ( NEW )
}
else
glEnable(GL_TEXTURE_2D); // Enable texture mapping
glShadeModel(GL_SMOOTH); // Enable Smooth Shading
glClearColor(0.0f, 0.0f, 0.0f, 0.0f); // Set the background colour (to black)
glClearDepth(1.0f); // Depth Buffer Setup
glEnable(GL_DEPTH_TEST); // Enables Depth Testing
glDepthFunc(GL_LEQUAL); // Select testing type
//get_user_points(&user_lat[0], &user_long[0]);
return true; // Initialization went OK
}
void DrawGLScene(GLvoid) // OpenGL drawing function
{
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the Screen and the Depth Buffer
glLoadIdentity();
glTranslatef(0.0f,0.0f,z);
glBindTexture (GL_TEXTURE_2D, texture[filter]);
glBegin (GL_QUADS);
glNormal3f(0, 0, 1);
glTexCoord2f (0,0);
glVertex3f (-3,-3 ,0);
glTexCoord2f (1,0 );
glVertex3f (3,-3 , 0);
glTexCoord2f (1, 1);
glVertex3f (3,3 , 0);
glTexCoord2f (0,1 );
glVertex3f (-3,3 ,0 );
glEnd();
glFlush();
}
void ReSizeGLScene(int w, int h) // Code to resize (and initialise) the OpenGL scene (run once when in fullscreen mode)
{
// Set up perspective view matrix
glViewport(0, 0, w, h);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
gluPerspective(45.0f, (GLfloat)w/(GLfloat)h, 0.1f, 100.0f);
//glOrtho(-50.0, 50.0, -50.0, 50.0, -50.0, 50.0);
// Set up modelview matrix
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
}
int LoadGLTextures(void) // Load Bitmaps And Convert To Textures
{
unsigned int SOIL_flags;
GLint mag_param;
GLint min_param;
printf("Loading Textures... ");
SOIL_flags = SOIL_FLAG_INVERT_Y;
mag_param = GL_NEAREST;
min_param = GL_NEAREST;
texture[1] = SOIL_load_OGL_texture
(
texture_filename,
SOIL_LOAD_AUTO,
SOIL_CREATE_NEW_ID, // create ID automatically
SOIL_flags
);
if(texture[1] == 0)
return false;
glBindTexture(GL_TEXTURE_2D, texture[1]);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,mag_param);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,min_param);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
printf("Textures Loaded\n");
return true;
}
Including the .c files in my project instead of the .lib file fixed the problem.
