glfw3 not drawing if OpenGL 3+ requested? (C)

Below I've included glfw-quick-test.c, which is basically copied verbatim from http://www.glfw.org/docs/latest/quick.html - except for the removed use of glad, an added background color, a couple of defines so it compiles on Ubuntu 14.04 (64-bit), and a preprocessor #ifdef switch (macro DO_OPENGL_THREE) to change the requested OpenGL version.
When I compile with:
gcc -g -o glfw-quick-test.exe glfw-quick-test.c -lglfw -lGLU -lGL -lm
./glfw-quick-test.exe
... then I get the message "GLFW Requesting OpenGL 2.1" and drawing is fine.
When I compile with:
gcc -g -DDO_OPENGL_THREE -o glfw-quick-test.exe glfw-quick-test.c -lglfw -lGLU -lGL -lm
./glfw-quick-test.exe
... then I get the message "GLFW Requesting OpenGL 3.2", and the rotating triangle is not drawn at all - only the background color is shown.
Can anyone explain why this happens? Can I somehow get GLFW3 to draw even if OpenGL 3.2 is requested, and if so, how?
(I'm aware that the original source code says "// NOTE: OpenGL error checks have been omitted for brevity", but I wasn't sure what sort of error checks I should add to see what the problem with OpenGL 3.2 drawing would be ...)
The code, glfw-quick-test.c (EDIT: now with error checking):
// http://www.glfw.org/docs/latest/quick.html

#define UBUNTU14

#ifdef UBUNTU14 // assume Ubuntu 14.04
// strange; Ubuntu 14 GLFW/glfw3.h doesn't have GLFW_TRUE, GLFW_FALSE, mentions GL_TRUE GL_FALSE
#define GLFW_TRUE GL_TRUE
#define GLFW_FALSE GL_FALSE
#endif

//~ #include <glad/glad.h> // "GL/GLES/EGL/GLX/WGL Loader-Generator based on the official specs."
#include <GLFW/glfw3.h>
#include "linmath.h"
#include <stdlib.h>
#include <stdio.h>

static const struct
{
    float x, y;
    float r, g, b;
} vertices[3] =
{
    { -0.6f, -0.4f, 1.f, 0.f, 0.f },
    {  0.6f, -0.4f, 0.f, 1.f, 0.f },
    {  0.f,   0.6f, 0.f, 0.f, 1.f }
};

static const char* vertex_shader_text =
"uniform mat4 MVP;\n"
"attribute vec3 vCol;\n"
"attribute vec2 vPos;\n"
"varying vec3 color;\n"
"void main()\n"
"{\n"
" gl_Position = MVP * vec4(vPos, 0.0, 1.0);\n"
" color = vCol;\n"
"}\n";

static const char* fragment_shader_text =
"varying vec3 color;\n"
"void main()\n"
"{\n"
" gl_FragColor = vec4(color, 1.0);\n"
"}\n";

static void error_callback(int error, const char* description)
{
    fprintf(stderr, "Error: %s\n", description);
}

static void key_callback(GLFWwindow* window, int key, int scancode, int action, int mods)
{
    if (key == GLFW_KEY_ESCAPE && action == GLFW_PRESS)
        glfwSetWindowShouldClose(window, GLFW_TRUE);
}

void checkGLerrors(char *label) {
    // check OpenGL error
    GLenum err;
    while ((err = glGetError()) != GL_NO_ERROR) {
        char* errorstr = "";
        switch (err) {
            case GL_INVALID_OPERATION:             errorstr = "INVALID_OPERATION"; break;
            case GL_INVALID_ENUM:                  errorstr = "INVALID_ENUM"; break;
            case GL_INVALID_VALUE:                 errorstr = "INVALID_VALUE"; break;
            case GL_OUT_OF_MEMORY:                 errorstr = "OUT_OF_MEMORY"; break;
            case GL_INVALID_FRAMEBUFFER_OPERATION: errorstr = "INVALID_FRAMEBUFFER_OPERATION"; break;
        }
        printf("OpenGL error ('%s'): %d %s\n", label, err, errorstr);
    }
}

int main(void)
{
    GLFWwindow* window;
    GLuint vertex_buffer, vertex_shader, fragment_shader, program;
    GLint mvp_location, vpos_location, vcol_location;

    glfwSetErrorCallback(error_callback);

    if (!glfwInit())
        exit(EXIT_FAILURE);

    // NB: Ubuntu will not draw if 3.2 (just black screen) - only if 2.0 or 2.1?
#ifdef DO_OPENGL_THREE
    printf("GLFW Requesting OpenGL 3.2\n");
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE); // only 3.2+
    glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GLFW_TRUE); // only 3.0+
    glfwWindowHint(GLFW_RESIZABLE, GL_TRUE); // https://stackoverflow.com/q/23834680/
#else
    printf("GLFW Requesting OpenGL 2.1\n");
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 2);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 1);
#endif
    checkGLerrors("post hint");

    window = glfwCreateWindow(640, 480, "Simple example", NULL, NULL);
    if (!window)
    {
        glfwTerminate();
        exit(EXIT_FAILURE);
    }
    checkGLerrors("post glfwCreateWindow");

    glfwSetKeyCallback(window, key_callback);
    glfwMakeContextCurrent(window);
    //~ gladLoadGLLoader((GLADloadproc) glfwGetProcAddress);
    glfwSwapInterval(1);

    // NOTE: OpenGL error checks have been omitted for brevity
    glGenBuffers(1, &vertex_buffer);
    checkGLerrors("post glGenBuffers");
    glBindBuffer(GL_ARRAY_BUFFER, vertex_buffer);
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);

    vertex_shader = glCreateShader(GL_VERTEX_SHADER);
    glShaderSource(vertex_shader, 1, &vertex_shader_text, NULL);
    glCompileShader(vertex_shader);

    fragment_shader = glCreateShader(GL_FRAGMENT_SHADER);
    glShaderSource(fragment_shader, 1, &fragment_shader_text, NULL);
    glCompileShader(fragment_shader);

    program = glCreateProgram();
    glAttachShader(program, vertex_shader);
    glAttachShader(program, fragment_shader);
    glLinkProgram(program);
    checkGLerrors("post glLinkProgram");

    mvp_location = glGetUniformLocation(program, "MVP");
    vpos_location = glGetAttribLocation(program, "vPos");
    vcol_location = glGetAttribLocation(program, "vCol");
    checkGLerrors("post gl locations");

    glEnableVertexAttribArray(vpos_location);
    checkGLerrors("post gl EnableVertexAttribArray");
    glVertexAttribPointer(vpos_location, 2, GL_FLOAT, GL_FALSE,
                          sizeof(float) * 5, (void*) 0);
    checkGLerrors("post glVertexAttribPointer");
    glEnableVertexAttribArray(vcol_location);
    checkGLerrors("post glEnableVertexAttribArray");
    glVertexAttribPointer(vcol_location, 3, GL_FLOAT, GL_FALSE,
                          sizeof(float) * 5, (void*) (sizeof(float) * 2));
    checkGLerrors("post glVertexAttribPointer");

    while (!glfwWindowShouldClose(window))
    {
        float ratio;
        int width, height;
        mat4x4 m, p, mvp;

        glfwGetFramebufferSize(window, &width, &height);
        ratio = width / (float) height;

        glViewport(0, 0, width, height);
        glClearColor(0.784314, 0.780392, 0.305882, 1.0); // add background color
        glClear(GL_COLOR_BUFFER_BIT);

        mat4x4_identity(m);
        mat4x4_rotate_Z(m, m, (float) glfwGetTime());
        mat4x4_ortho(p, -ratio, ratio, -1.f, 1.f, 1.f, -1.f);
        mat4x4_mul(mvp, p, m);

        glUseProgram(program);
        glUniformMatrix4fv(mvp_location, 1, GL_FALSE, (const GLfloat*) mvp);
        glDrawArrays(GL_TRIANGLES, 0, 3);

        glfwSwapBuffers(window);
        glfwPollEvents();
    }

    glfwDestroyWindow(window);
    glfwTerminate();
    exit(EXIT_SUCCESS);
}

Thanks to @BDL and his comment about VAOs (vertex array objects), I found "How to use VBOs without VAOs with OpenGL core profile?" - and by trying things from there, I found that the only change needed to the OP code above, so that drawing also works with OpenGL 3.2, is this:
...
    //~ gladLoadGLLoader((GLADloadproc) glfwGetProcAddress);
    glfwSwapInterval(1);

#ifdef DO_OPENGL_THREE
    // https://stackoverflow.com/a/30057424/277826:
    // "You can however just create and bind a VAO and forget about it (keep it bound)."
    GLuint VAO;
    glGenVertexArrays(1, &VAO);
    glBindVertexArray(VAO);
#endif

    // NOTE: OpenGL error checks have been omitted for brevity
    glGenBuffers(1, &vertex_buffer);
...
Once this section with VAO is in, there are no more OpenGL errors printed, and drawing is OK.
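For what it's worth, the reason appears to be that the core profile removes the default vertex array object, so until a VAO is bound, the attribute-setup calls have nothing to record state into; with the checkGLerrors helper above, that shows up like this:

    // in a 3.2 core context with no VAO bound:
    glEnableVertexAttribArray(vpos_location);
    checkGLerrors("post gl EnableVertexAttribArray"); // reports 1282, i.e. GL_INVALID_OPERATION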

Related

Why is no character being rendered

So I am trying to render a character in OpenGL using freetype2. If I replace the variable vertex_location in my code with 0, I can see some kind of pixelated thing being rendered, but it seems wrong, because every time I restart the application the pixelated thing looks different... so I am guessing it is just random bytes.
Note: I am using GLEW, freetype2, glfw3, cglm
Anyway, here is my code:
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <stdio.h>
#include <cglm/cglm.h>
#include <cglm/call.h>
#include <math.h>
#include <ft2build.h>
#include FT_FREETYPE_H

/**
 * Capture errors from glfw.
 */
static void error_callback(int error, const char* description)
{
    fprintf(stderr, "Error: %s\n", description);
}

/**
 * Capture key callbacks from glfw
 */
static void key_callback(GLFWwindow* window, int key, int scancode, int action, int mods)
{
    if (key == GLFW_KEY_ESCAPE && action == GLFW_PRESS)
        glfwSetWindowShouldClose(window, GLFW_TRUE);
}

typedef struct CHARACTER_STRUCT
{
    GLuint texture;     // ID handle of the glyph texture
    vec2 size;          // Size of glyph
    float width;
    float height;
    float bearing_left;
    float bearing_top;
    GLuint advance;     // Horizontal offset to advance to next glyph
} character_T;

character_T* get_character(char c)
{
    // FreeType
    FT_Library ft;

    // All functions return a value different than 0 whenever an error occurred
    if (FT_Init_FreeType(&ft))
        perror("ERROR::FREETYPE: Could not init FreeType Library");

    // Load font as face
    FT_Face face;
    if (FT_New_Face(ft, "/usr/share/fonts/truetype/lato/Lato-Medium.ttf", 0, &face))
        perror("ERROR::FREETYPE: Failed to load font");

    // Set size to load glyphs as
    FT_Set_Pixel_Sizes(face, 0, 32);

    // Disable byte-alignment restriction
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);

    // Load character glyph
    if (FT_Load_Char(face, c, FT_LOAD_RENDER))
        perror("ERROR::FREETYTPE: Failed to load Glyph");

    // Generate texture
    GLuint texture;
    glGenTextures(1, &texture);
    glBindTexture(GL_TEXTURE_2D, texture);
    glTexImage2D(
        GL_TEXTURE_2D,
        0,
        GL_RED,
        face->glyph->bitmap.width,
        face->glyph->bitmap.rows,
        0,
        GL_RED,
        GL_UNSIGNED_BYTE,
        face->glyph->bitmap.buffer
    );

    // Set texture options
    /*glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);*/
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);

    // Now store character for later use
    character_T* character = calloc(1, sizeof(struct CHARACTER_STRUCT));
    character->texture = texture;
    character->width = face->glyph->bitmap.width;
    character->height = face->glyph->bitmap.rows;
    character->bearing_left = face->glyph->bitmap_left;
    character->bearing_top = face->glyph->bitmap_top;
    character->advance = face->glyph->advance.x;

    glBindTexture(GL_TEXTURE_2D, 0);

    // Destroy FreeType once we're finished
    FT_Done_Face(face);
    FT_Done_FreeType(ft);

    return character;
}

int main(int argc, char* argv[])
{
    glfwSetErrorCallback(error_callback);

    /**
     * Initialize glfw to be able to use it.
     */
    if (!glfwInit())
        perror("Failed to initialize glfw.\n");

    /**
     * Setting some parameters to the window,
     * using OpenGL 3.3
     */
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
    glfwWindowHint(GLFW_FLOATING, GL_TRUE);
    glfwWindowHint(GLFW_RESIZABLE, GL_FALSE);

    /**
     * Creating our window
     */
    GLFWwindow* window = glfwCreateWindow(640, 480, "My Title", NULL, NULL);
    if (!window)
        perror("Failed to create window.\n");

    glfwSetKeyCallback(window, key_callback);

    /**
     * Enable OpenGL as current context
     */
    glfwMakeContextCurrent(window);

    /**
     * Initialize glew and check for errors
     */
    GLenum err = glewInit();
    if (GLEW_OK != err)
        fprintf(stderr, "Error: %s\n", glewGetErrorString(err));
    fprintf(stdout, "Status: Using GLEW %s\n", glewGetString(GLEW_VERSION));

    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);

    unsigned int VAO;
    glGenVertexArrays(1, &VAO);

    unsigned int VBO;
    glGenBuffers(1, &VBO);

    GLuint vertex_shader, fragment_shader, program;
    GLint mvp_location, vertex_location;

    /**
     * Vertex Shader
     */
    static const char* vertex_shader_text =
        "#version 330 core\n"
        "uniform mat4 MVP;\n"
        "attribute vec4 thevertex;\n"
        "out vec2 TexCoord;\n"
        "void main()\n"
        "{\n"
        " gl_Position = MVP * vec4(thevertex.xy, 0.0, 1.0);\n"
        " TexCoord = thevertex.zw;"
        "}\n";

    /**
     * Fragment Shader
     */
    static const char* fragment_shader_text =
        "#version 330 core\n"
        "varying vec3 color;\n"
        "in vec2 TexCoord;\n"
        "uniform sampler2D ourTexture;\n"
        "void main()\n"
        "{\n"
        " vec4 sampled = vec4(1.0, 1.0, 1.0, texture(ourTexture, TexCoord).r);\n"
        " gl_FragColor = vec4(vec3(1, 1, 1), 1.0) * sampled;\n"
        "}\n";

    int success;
    char infoLog[512];

    /**
     * Compile vertex shader and check for errors
     */
    vertex_shader = glCreateShader(GL_VERTEX_SHADER);
    glShaderSource(vertex_shader, 1, &vertex_shader_text, NULL);
    glCompileShader(vertex_shader);
    glGetShaderiv(vertex_shader, GL_COMPILE_STATUS, &success);
    if (!success)
    {
        printf("Vertex Shader Error\n");
        glGetShaderInfoLog(vertex_shader, 512, NULL, infoLog);
        perror(infoLog);
    }

    /**
     * Compile fragment shader and check for errors
     */
    fragment_shader = glCreateShader(GL_FRAGMENT_SHADER);
    glShaderSource(fragment_shader, 1, &fragment_shader_text, NULL);
    glCompileShader(fragment_shader);
    glGetShaderiv(fragment_shader, GL_COMPILE_STATUS, &success);
    if (!success)
    {
        printf("Fragment Shader Error\n");
        glGetShaderInfoLog(fragment_shader, 512, NULL, infoLog);
        perror(infoLog);
    }

    /**
     * Create shader program and check for errors
     */
    program = glCreateProgram();
    glAttachShader(program, vertex_shader);
    glAttachShader(program, fragment_shader);
    glLinkProgram(program);
    glGetProgramiv(program, GL_LINK_STATUS, &success);
    if (!success)
    {
        glGetProgramInfoLog(program, 512, NULL, infoLog);
        perror(infoLog);
    }

    /**
     * Grab locations from shader
     */
    vertex_location = glGetUniformLocation(program, "thevertex");
    mvp_location = glGetUniformLocation(program, "MVP");

    glBindVertexArray(VAO);

    /**
     * Create and bind texture
     */
    character_T* character = get_character('s');
    unsigned int texture = character->texture;

    float scale = 1.0f;
    GLfloat xpos = 0;
    GLfloat ypos = 0;
    GLfloat w = character->width * scale;
    GLfloat h = character->height * scale;

    GLfloat vertices[6][4] = {
        { xpos,     ypos + h, 0.0, 0.0 },
        { xpos,     ypos,     0.0, 1.0 },
        { xpos + w, ypos,     1.0, 1.0 },
        { xpos,     ypos + h, 0.0, 0.0 },
        { xpos + w, ypos,     1.0, 1.0 },
        { xpos + w, ypos + h, 1.0, 0.0 }
    };

    /**
     * Main loop
     */
    while (!glfwWindowShouldClose(window))
    {
        int width, height;
        mat4 p, mvp;
        double t = glfwGetTime();

        glfwGetFramebufferSize(window, &width, &height);
        glViewport(0, 0, width, height);
        glClear(GL_COLOR_BUFFER_BIT);

        mat4 m = GLM_MAT4_IDENTITY_INIT;
        glm_translate(m, (vec3){ -sin(t), -cos(t), 0 });
        glm_ortho_default(width / (float) height, p);
        glm_mat4_mul(p, m, mvp);

        glUseProgram(program);
        glUniformMatrix4fv(mvp_location, 1, GL_FALSE, (const GLfloat*) mvp);

        /**
         * Draw texture
         */
        glActiveTexture(GL_TEXTURE0);
        glBindVertexArray(VAO);
        glBindTexture(GL_TEXTURE_2D, texture);
        glBindBuffer(GL_ARRAY_BUFFER, VBO);
        glBufferData(GL_ARRAY_BUFFER, sizeof(GLfloat) * 6 * 4, vertices, GL_STATIC_DRAW);
        glEnableVertexAttribArray(vertex_location);
        glVertexAttribPointer(vertex_location, 4, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat), 0);
        glDrawArrays(GL_TRIANGLES, 0, 6);

        glfwSwapBuffers(window);
        glfwPollEvents();
    }

    glfwDestroyWindow(window);
    glfwTerminate();

    return 0;
}
thevertex is a vertex attribute:
attribute vec4 thevertex;
Thus you have to get the resource index of the attribute with glGetAttribLocation rather than glGetUniformLocation. Change
vertex_location = glGetUniformLocation(program, "thevertex");
to
vertex_location = glGetAttribLocation(program, "thevertex");
glGetUniformLocation is meant to retrieve the active resource index (location) of a uniform variable.
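As a quick sanity check (my sketch, not part of the original answer): both lookup functions return -1 for a name that is not an active attribute or uniform in the linked program, so the results can be validated before use:

    // after glLinkProgram(program):
    GLint vertex_location = glGetAttribLocation(program, "thevertex"); // attribute
    GLint mvp_location    = glGetUniformLocation(program, "MVP");      // uniform
    if (vertex_location == -1 || mvp_location == -1)
        fprintf(stderr, "location lookup failed: name is not active in the program\n");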

OpenGL shaders not working with uniform variable

I'm trying to write code that draws a triangle with a changing color, following the tutorial LearnOpenGL - Shaders.
But my code doesn't work: the triangle blinks instead of gradually changing color, and I don't know whether it is a hardware or software problem.
Can anyone help me?
I'm using OpenGL 3.0 with Mesa 18.3.4, and compiling with:
gcc shaders_change.c -lGL -lglfw -lm
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <GL/gl.h>
#include <GLFW/glfw3.h>

void framebuffer_size_callback(GLFWwindow* window, int width, int height);
void processInput(GLFWwindow *window);

const unsigned int SCR_WIDTH = 800;
const unsigned int SCR_HEIGHT = 600;

const char *vertexShaderSource = "#version 330 core\n"
    "layout (location = 0) in vec3 aPos;\n"
    "void main()\n"
    "{\n"
    " gl_Position = vec4(aPos, 1.0);\n"
    "}\n\0";
const char *fragmentShaderSource = "#version 330\n"
    "out vec4 FragColor;\n"
    "uniform vec4 ourColor;\n"
    "void main()\n"
    "{\n"
    " FragColor = ourColor;\n"
    "}\n\0";

int main(){
    // glfw: initialize and configure
    // ------------------------------
    glfwInit();
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);

    // glfw window creation
    // --------------------
    GLFWwindow* window = glfwCreateWindow(SCR_WIDTH, SCR_HEIGHT, "LearnOpenGL", NULL, NULL);
    if (window == NULL)
    {
        printf("Failed to create GLFW window");
        glfwTerminate();
        return -1;
    }
    glfwMakeContextCurrent(window);
    glfwSetFramebufferSizeCallback(window, framebuffer_size_callback);

    // build and compile our shader program
    // ------------------------------------
    // vertex shader
    int vertexShader = glCreateShader(GL_VERTEX_SHADER);
    glShaderSource(vertexShader, 1, &vertexShaderSource, NULL);
    glCompileShader(vertexShader);
    // check for shader compile errors
    int success;
    char infoLog[512];
    glGetShaderiv(vertexShader, GL_COMPILE_STATUS, &success);
    if (!success)
    {
        glGetShaderInfoLog(vertexShader, 512, NULL, infoLog);
        printf("ERROR::SHADER::VERTEX::COMPILATION_FAILED\n%s", infoLog);
    }
    // fragment shader
    int fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
    glShaderSource(fragmentShader, 1, &fragmentShaderSource, NULL);
    glCompileShader(fragmentShader);
    // check for shader compile errors
    glGetShaderiv(fragmentShader, GL_COMPILE_STATUS, &success);
    if (!success)
    {
        glGetShaderInfoLog(fragmentShader, 512, NULL, infoLog);
        printf("ERROR::SHADER::VERTEX::COMPILATION_FAILED\n%s", infoLog);
    }
    // link shaders
    int shaderProgram = glCreateProgram();
    glAttachShader(shaderProgram, vertexShader);
    glAttachShader(shaderProgram, fragmentShader);
    glLinkProgram(shaderProgram);
    // check for linking errors
    glGetProgramiv(shaderProgram, GL_LINK_STATUS, &success);
    if (!success) {
        glGetProgramInfoLog(shaderProgram, 512, NULL, infoLog);
        printf("ERROR::SHADER::VERTEX::COMPILATION_FAILED\n%s", infoLog);
    }
    glDeleteShader(vertexShader);
    glDeleteShader(fragmentShader);

    // set up vertex data (and buffer(s)) and configure vertex attributes
    // ------------------------------------------------------------------
    float vertices[] = {
        // positions
         0.5f, -0.5f, 0.0f, // bottom right
        -0.5f, -0.5f, 0.0f, // bottom left
         0.0f,  0.5f, 0.0f, // top
    };

    unsigned int VBO, VAO;
    glGenVertexArrays(1, &VAO);
    glGenBuffers(1, &VBO);
    // bind the Vertex Array Object first, then bind and set vertex buffer(s), and then configure vertex attributes(s).
    glBindVertexArray(VAO);
    glBindBuffer(GL_ARRAY_BUFFER, VBO);
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
    // position attribute
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), 0);
    glEnableVertexAttribArray(0);
    // color attribute
    //glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 6 * sizeof(float), (void*)(3 * sizeof(float)));
    //glEnableVertexAttribArray(VAO);

    // You can unbind the VAO afterwards so other VAO calls won't accidentally modify this VAO, but this rarely happens. Modifying other
    // VAOs requires a call to glBindVertexArray anyways so we generally don't unbind VAOs (nor VBOs) when it's not directly necessary.

    // as we only have a single shader, we could also just activate our shader once beforehand if we want to
    glBindVertexArray(VAO);

    // render loop
    // -----------
    float timeValue;
    float greenValue;
    int vertexColorLocation;
    while (!glfwWindowShouldClose(window)){
        // input
        // -----
        processInput(window);

        // render
        // ------
        //glClearColor(0.2f, 0.3f, 0.3f, 1.0f);
        glClear(GL_COLOR_BUFFER_BIT);

        glUseProgram(shaderProgram);
        timeValue = glfwGetTime();
        greenValue = sin(timeValue) / 2.0f + 0.5f;
        vertexColorLocation = glGetUniformLocation(shaderProgram, "ourColor");
        glUniform4f(vertexColorLocation, 0.0f, greenValue, 0.0f, 1.0f);

        // render the triangle
        glBindVertexArray(VAO);
        glDrawArrays(GL_TRIANGLES, 0, 3);

        // glfw: swap buffers and poll IO events (keys pressed/released, mouse moved etc.)
        // -------------------------------------------------------------------------------
        glfwSwapBuffers(window);
        glfwPollEvents();
    }

    // optional: de-allocate all resources once they've outlived their purpose:
    // ------------------------------------------------------------------------
    glDeleteVertexArrays(1, &VAO);
    glDeleteBuffers(1, &VBO);

    // glfw: terminate, clearing all previously allocated GLFW resources.
    // ------------------------------------------------------------------
    glfwTerminate();
    return 0;
}

void processInput(GLFWwindow *window){
    if (glfwGetKey(window, GLFW_KEY_ESCAPE) == GLFW_PRESS)
        glfwSetWindowShouldClose(window, 1);
}

void framebuffer_size_callback(GLFWwindow* window, int width, int height){
    glViewport(0, 0, width, height);
}
Thank you all for the help!
I discovered my problem: I had simply forgotten to include the glad.h header and to link against libdl.
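For anyone following along, a minimal sketch of that fix, assuming a glad loader generated for core 3.3 with the generated glad.c placed next to the source file (the loader call is the same one commented out in the first question above):

    #include <glad/glad.h> // must come before GLFW/glfw3.h
    #include <GLFW/glfw3.h>
    ...
    glfwMakeContextCurrent(window);
    // load all GL function pointers through GLFW's resolver
    if (!gladLoadGLLoader((GLADloadproc) glfwGetProcAddress)) {
        printf("Failed to initialize glad\n");
        return -1;
    }

and the compile line gains the loader source plus libdl:
gcc shaders_change.c glad.c -lGL -lglfw -lm -ldl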

Something wrong when I try to draw a triangle with opengl and glew using glsl

I'm using OpenGL 3 and GLEW to draw a triangle. I have a window (changing the background color works fine), but I can't get my shader to draw anything on it. I did some tests like:
glGetProgramiv(shader_programme, GL_LINK_STATUS, &isLinked);
printf("\nProg : %i",isLinked);
And that part is fine; the print returns 1 for the program, the vertex shader, and the fragment shader.
I suppose I missed a clear somewhere, but I'm not sure, and I'm also pretty lost here...
This is my code:
#include "../include/scop.h"
#include <OpenGL/gl.h>
#include ".../lfw3/3.2.1/include/GLFW/glfw3.h"
t_scop *ft_init_window(t_scop *scop, t_parse parse)
{
if (!glfwInit())
ft_putstr("error init");
else
{
glfwWindowHint(GLFW_SAMPLES, 4);
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
WIN = glfwCreateWindow(WIN_X, WIN_Y, "Scop", NULL, NULL);
glfwMakeContextCurrent(WIN);
glfwSetInputMode(WIN, GLFW_STICKY_KEYS, GL_TRUE);
glfwSetInputMode(WIN, GLFW_CURSOR, GLFW_CURSOR_DISABLED);
glfwPollEvents();
glfwSetCursorPos(WIN, WIN_X / 2.0, WIN_Y / 2.0);
glClearColor(0.0f, 0.5f, 0.4f, 0.0f);
glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_LESS);
}
float points[] = {
-1.0f, -1.0f, 0.0f,
1.0f, -1.0f, 0.0f,
0.0f, 1.0f, 0.0f
};
//init buffer and fill it
GLuint vbo = 0;
glGenBuffers (1, &vbo);
glBindBuffer (GL_ARRAY_BUFFER, vbo);
glBufferData (GL_ARRAY_BUFFER, 9 * sizeof (float), points, GL_STATIC_DRAW);
//init VertexArray
GLuint vao = 0;
glGenVertexArraysAPPLE (1, &vao);
glBindVertexArrayAPPLE (vao);
glEnableVertexAttribArray (0);
glBindBuffer (GL_ARRAY_BUFFER, vbo);
glVertexAttribPointer (0, 3, GL_FLOAT, GL_FALSE, 0, NULL);
glDrawArrays(GL_TRIANGLES, 0, 3);
const char* vertex_shader =
"#version 330 core\n"
"layout (location = 0) in vec3 position;\n"
"void main () {"
"gl_Position.xyz = position;"
"gl_Position.w = 1.0;"
"}\0";
const char* fragment_shader =
"#version 330 core\n"
"out vec3 color;"
"void main () {"
"color = vec3(1,0,0);"
"}\0";
//create vertex
GLuint vs = glCreateShader (GL_VERTEX_SHADER);
glShaderSource (vs, 1, &vertex_shader, NULL);
glCompileShader (vs);
//tests
GLint success = 0;
glGetShaderiv(vs, GL_COMPILE_STATUS, &success);
printf ("Taille du source:%i\n", success);
if (GL_FALSE == success)
printf("false");
else printf("true");
//create frag
GLuint fs = glCreateShader (GL_FRAGMENT_SHADER);
glShaderSource (fs, 1, &fragment_shader, NULL);
glCompileShader (fs);
//tests
success = 0;
glGetShaderiv(fs, GL_COMPILE_STATUS, &success);
printf("Taille fs : %i",success);
// GLuint shader_programme = LoadShaders (vs,fs);
GLint shader_programme = glCreateProgram ();
glAttachShader (shader_programme, vs);
glAttachShader (shader_programme, fs);
glLinkProgram (shader_programme);
//tests
GLint isLinked = 0;
glGetProgramiv(shader_programme, GL_LINK_STATUS, &isLinked);
printf("\nProg : %i",isLinked);
//idk if i need to do this now
glDetachShader(shader_programme, vs);
glDetachShader(shader_programme, fs);
glDeleteShader(vs);
glDeleteShader(fs);
glGetError();
while (!glfwWindowShouldClose(WIN))
{
glClear (GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glClearDepth(GL_DEPTH_TEST);
glUseProgram (shader_programme);
glBindVertexArrayAPPLE (vao);
glDrawArrays (GL_TRIANGLES, 0, 3);
//glUseProgram(0); ???
glfwPollEvents ();
glBindVertexArrayAPPLE (0);
glfwSwapBuffers(WIN);
}
// glfwTerminate();
return (scop);
}
Any help is greatly appreciated!
The problem lies in this line:
glClearDepth(GL_DEPTH_TEST);
glClearDepth (doc) specifies the value the depth buffer should be cleared with and expects a floating point value between 0 and 1. It is similar to glClearColor, just for depth.
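A minimal sketch of the intended usage - set the clear value (1.0 is the default and corresponds to the far plane), then clear:

    glClearDepth(1.0); // value the depth buffer is cleared to
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);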
Additionally, you should be using the core profile VAO functions instead of the ones from the APPLE extension. The APPLE extension should only be used in an OpenGL context <= 2.1.
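A sketch of the same code with the core entry points (the names are identical minus the APPLE suffix):

    GLuint vao = 0;
    glGenVertexArrays(1, &vao); // instead of glGenVertexArraysAPPLE
    glBindVertexArray(vao);     // instead of glBindVertexArrayAPPLE
    ...
    // per frame:
    glBindVertexArray(vao);
    glDrawArrays(GL_TRIANGLES, 0, 3);
    glBindVertexArray(0);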

glGenBuffers is null in GLEW?

I wrote a simple program using OpenGL, SDL 2, and GLEW. It works properly on Linux, under Wine, and on one Windows 7 system I ran it on. However, on another Windows system, it is crashing as soon as it gets to glGenBuffers, even though it says OpenGL 2.1 is available. Here is the code:
#define GLEW_STATIC
#include <stdio.h>
#include <GL/glew.h>
#include <SDL2/SDL.h>

SDL_Window *window;
SDL_GLContext context;
GLuint vbo;

void Init() {
    SDL_Init(SDL_INIT_EVERYTHING);
    SDL_GL_LoadLibrary(NULL);

    int min, max;
    SDL_GL_GetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, &max);
    SDL_GL_GetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, &min);
    printf("Default OpenGL version %d.%d\n", max, min);
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 2);
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 1);
    SDL_GL_GetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, &max);
    SDL_GL_GetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, &min);
    printf("OpenGL version %d.%d\n", max, min);
    SDL_GL_SetAttribute(SDL_GL_ACCELERATED_VISUAL, 1);

    window = SDL_CreateWindow("", 0, 0, 500, 500, SDL_WINDOW_OPENGL);
    context = SDL_GL_CreateContext(window);

    glewExperimental = 1;
    GLenum err = glewInit();
    if (GLEW_OK != err)
    {
        printf("Error: %s\n", glewGetErrorString(err));
        return;
    }
    if (window == NULL) {
        printf("Could not create window: %s\n", SDL_GetError());
        return;
    }

    glClearColor(1.0f, 1.0f, 1.0f, 0.0f);
    glShadeModel(GL_FLAT);
    glEnableClientState(GL_VERTEX_ARRAY);

    float data[][2] = {{50, 50}, {100, 50}, {75, 100}};
    glGenBuffers(1, &vbo);
    glBindBuffer(GL_ARRAY_BUFFER, vbo);
    glBufferData(GL_ARRAY_BUFFER, sizeof(data), data, GL_STATIC_DRAW);
}

void Render() {
    glViewport(0, 0, (GLsizei) 500, (GLsizei) 500);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    gluOrtho2D(0.0f, (GLdouble) 500, 0.0f, (GLdouble) 500);
    glClear(GL_COLOR_BUFFER_BIT);
    glColor3f(0.0f, 0.0f, 0.0f);
    glBindBuffer(GL_ARRAY_BUFFER, vbo);
    glVertexPointer(2, GL_FLOAT, 2 * sizeof(float), 0);
    glDrawArrays(GL_TRIANGLES, 0, 3);
    SDL_GL_SwapWindow(window);
}

int main(int argc, char **argv) {
    Init();
    Render();
    SDL_Delay(2000);
    glDeleteBuffers(1, &vbo);
    return (0);
}
Here is some more information I have added since initially posting this question:
Using %p to print the address of glBindBuffer, it is 0.
Both (GLEW_ARB_vertex_buffer_object == GL_TRUE) and (GLEW_ARB_vertex_array_object == GL_TRUE) return 0.
glGetString(GL_RENDERER) returns "GDI Generic".
I have also tried setting the depth size to 16, but that still doesn't work.
The graphics card is an Nvidia 750 TI.
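"GDI Generic" is Windows' built-in software OpenGL 1.1 renderer, which predates buffer objects, so GLEW leaves glGenBuffers as a null function pointer and calling it crashes - which matches all of the symptoms above. A minimal guard (my sketch, using GLEW's standard availability flags) would be:

    // after glewInit(): bail out if buffer objects are unavailable,
    // e.g. on the OpenGL 1.1 "GDI Generic" software renderer
    printf("Renderer: %s\n", (const char*) glGetString(GL_RENDERER));
    if (!GLEW_VERSION_1_5 && !GLEW_ARB_vertex_buffer_object) {
        printf("Error: buffer objects not supported by this context\n");
        return;
    }

The usual fix on such a machine is installing the vendor's GPU driver so that a hardware-accelerated context is created instead.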

OpenGL C++ order for using a vertex buffer and a texture buffer

I'm having issues loading a texture onto my triangle strips. I'm following Anton Gerdelan's tutorial, and after failing with my main program, I went back to basics and just tried to make a plain square and put his texture (the skull and crossbones) on it.
I copied and pasted the code from his "Hello Triangle" page, which worked, but once I tried to fit in the code from his texture tutorial above (and changed the triangle to a square), all I get is a big white square with no texture.
I've checked the status of my shaders with glGetShaderiv() and they returned positive, and I checked the loaded image to see whether the pixel data was sensible, so I believe my error is in how I declare my VBOs, or in the order/parameters with which I use them.
Here's the complete code I copied, which compiles fine in Visual Studio 2013, except that the output isn't what is expected.
I am using the static libraries of GLEW and GLFW, along with the STBI Image header
#include <GL/glew.h>  // include GLEW and new version of GL on Windows
#include <GL/glfw3.h> // GLFW helper library
#include <stdio.h>
#define STB_IMAGE_IMPLEMENTATION
#include <stb/stb_image.h>

const char* vertex_shader =
    "#version 400\n"
    "in vec3 vp;"
    "layout (location=1) in vec2 vt; // per-vertex texture co-ords"
    "out vec2 texture_coordinates; "
    "void main () {"
    " gl_Position = vec4 (vp, 1.0);"
    " texture_coordinates = vt; "
    "}";

const char* fragment_shader =
    "#version 400\n"
    "in vec2 texture_coordinates;"
    "uniform sampler2D basic_texture;"
    "out vec4 frag_colour;"
    "void main () {"
    "vec4 texel = texture(basic_texture, texture_coordinates);"
    "frag_colour = texel; "
    "}";

float points[] = {
    -0.5f, -0.5f, 0.0f,
    -0.5f,  0.5f, 0.0f,
     0.5f, -0.5f, 0.0f,
     0.5f,  0.5f, 0.0f
};

float texcoords[] = {
    0.0f, 1.0f,
    0.0f, 0.0f,
    1.0, 0.0,
    1.0, 0.0,
    1.0, 1.0,
    0.0, 1.0
};

GLFWwindow* window;
unsigned int vt_vbo;
unsigned int tex = 0;
GLuint vao = 0;
GLuint vbo = 0;
GLuint shader_programme;

void initializeGL(){
    // start GL context and O/S window using the GLFW helper library
    if (!glfwInit()) {
        printf("ERROR: could not start GLFW3\n");
        return;
    }
    window = glfwCreateWindow(640, 480, "Texture Test", NULL, NULL);
    if (!window) {
        printf("ERROR: could not open window with GLFW3\n");
        glfwTerminate();
        return;
    }
    glfwMakeContextCurrent(window);

    // start GLEW extension handler
    glewExperimental = GL_TRUE;
    glewInit();

    // get version info
    const GLubyte* renderer = glGetString(GL_RENDERER); // get renderer string
    const GLubyte* version = glGetString(GL_VERSION);   // version as a string
    printf("Renderer: %s\n", renderer);
    printf("OpenGL version supported %s\n", version);

    // tell GL to only draw onto a pixel if the shape is closer to the viewer
    glEnable(GL_DEPTH_TEST); // enable depth-testing
    glDepthFunc(GL_LESS);    // depth-testing interprets a smaller value as "closer"
}

void startShaders(){
    GLuint vs = glCreateShader(GL_VERTEX_SHADER);
    glShaderSource(vs, 1, &vertex_shader, NULL);
    glCompileShader(vs);
    GLuint fs = glCreateShader(GL_FRAGMENT_SHADER);
    glShaderSource(fs, 1, &fragment_shader, NULL);
    glCompileShader(fs);
    shader_programme = glCreateProgram();
    glAttachShader(shader_programme, fs);
    glAttachShader(shader_programme, vs);
    glLinkProgram(shader_programme);

    GLint vsstat;
    glGetShaderiv(vs, GL_COMPILE_STATUS, &vsstat);
    GLint fsstat;
    glGetShaderiv(fs, GL_COMPILE_STATUS, &fsstat);
    printf("%i\n%i\n", vsstat, fsstat);
}

void loadImage(){
    int x, y, n;
    int force_channels = 4;
    unsigned char* image_data = stbi_load("skulluvmap.png", &x, &y, &n, force_channels);
    if (!image_data) {
        printf("ERROR: could not load %s\n", "skulluvmap.png");
    }

    // flip the image vertically, row by row
    int width_in_bytes = x * 4;
    unsigned char *top = NULL;
    unsigned char *bottom = NULL;
    unsigned char temp = 0;
    int half_height = y / 2;
    for (int row = 0; row < half_height; row++) {
        top = image_data + row * width_in_bytes;
        bottom = image_data + (y - row - 1) * width_in_bytes;
        for (int col = 0; col < width_in_bytes; col++) {
            temp = *top;
            *top = *bottom;
            *bottom = temp;
            top++;
            bottom++;
        }
    }
    printf("first 4 bytes are: %i %i %i %i\n",
        image_data[0], image_data[1], image_data[2], image_data[3]
    );

    glGenTextures(1, &tex);
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, tex);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, x, y, 0, GL_RGBA, GL_UNSIGNED_BYTE, image_data);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
}

void generateBuffers(){
    glGenVertexArrays(1, &vao);
    glBindVertexArray(vao);

    glGenBuffers(1, &vbo);
    glBindBuffer(GL_ARRAY_BUFFER, vbo);
    glBufferData(GL_ARRAY_BUFFER, 12 * sizeof(float), points, GL_STATIC_DRAW);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, NULL);
    glEnableVertexAttribArray(0); // don't forget this!

    glGenBuffers(1, &vt_vbo);
    glBindBuffer(GL_ARRAY_BUFFER, vt_vbo);
    glBufferData(GL_ARRAY_BUFFER, 12 * sizeof(float), texcoords, GL_STATIC_DRAW);
    glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, NULL);
    glEnableVertexAttribArray(1); // don't forget this!
}

void mainLoop(){
    while (!glfwWindowShouldClose(window)) {
        // wipe the drawing surface clear
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
        int tex_loc = glGetUniformLocation(shader_programme, "basic_texture");
        glUseProgram(shader_programme);
        glUniform1i(tex_loc, 0); // use active texture 0
        // draw points 0-4 from the currently bound VAO with current in-use shader
        glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
        // update other events like input handling
        glfwPollEvents();
        // put the stuff we've been drawing onto the display
        glfwSwapBuffers(window);
    }
}

int main() {
    initializeGL();
    startShaders();
    loadImage();
    generateBuffers();
    mainLoop();
    // close GL context and any other GLFW resources
    glfwTerminate();
    return 0;
}
You're misusing your second buffer, which is supposed to be the buffer with texcoords. What you really want is a pair of texture coordinates for every vertex. That means your texcoords array should in fact store 4 pairs, because you have 4 triples in the points array. So that's the first fix. You probably want it to look like:
float texcoords[] = {
    0.0f, 1.0f,
    0.0f, 0.0f,
    1.0, 0.0,
    1.0, 1.0,
};
Then in generateBuffers, your vt_vbo is wrong. The data should be passed this way:
glBufferData(GL_ARRAY_BUFFER, 8 * sizeof(float), texcoords, GL_STATIC_DRAW);
because you only want to pass 8 values there: 2 texcoords for each vertex.
Edit:
This however doesn't fully explain why your texture doesn't appear at all. I initially thought there might be a problem with your texcoords pointer, but that doesn't seem to be the case.
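As a side note (my addition, not part of the original answer), sizing the upload with sizeof avoids hard-coding the element count in the first place:

    // sizeof(texcoords) is the array's size in bytes at compile time,
    // so the call stays correct if the coordinate list changes
    glBufferData(GL_ARRAY_BUFFER, sizeof(texcoords), texcoords, GL_STATIC_DRAW);

Note this only works while texcoords is a real array in scope, not a pointer.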
