Why does GLFW_CONTEXT_VERSION 3.3 render slower than 2.0?

I have the following code:
#include <glad/glad.h>
#include <GLFW/glfw3.h>
#define GLFW_INCLUDE_NONE
#include <stdio.h>
#include <stdlib.h>
static void error_callback(int error, const char *description)
{
    printf("Error: %s\n", description);
}

int main()
{
    glfwInit();
    glfwSetErrorCallback(error_callback);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 2);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 0);
    GLFWwindow *window = glfwCreateWindow(800, 600, "Learn OpenGL", NULL, NULL);
    glfwMakeContextCurrent(window);
    if (!gladLoadGLLoader((GLADloadproc)glfwGetProcAddress))
    {
        printf("Failed to initialize GLAD. Fatal error.\n");
        return -1;
    }
    glfwSwapInterval(1);
    while (!glfwWindowShouldClose(window))
    {
        int width, height;
        glfwGetFramebufferSize(window, &width, &height);
        glViewport(0, 0, width, height);
        glClearColor(0.0f, 0.5f, 1.0f, 1.0f);
        glClear(GL_COLOR_BUFFER_BIT);
        glfwSwapBuffers(window);
        glfwPollEvents();
    }
    glfwDestroyWindow(window);
    glfwTerminate();
    return 0;
}
When the code is set up the way it is above, with the context version hints set to 2.0, it works pretty well: when I resize the window, the newly exposed area is filled in as soon as I stop resizing. However, when I set the version to 3.3 (which is the version I want to use), it lags noticeably in spreading the blue across the whole window, even after I have stopped resizing. (I'm running Windows, by the way.)
This does not happen when the version is 2.0. Why is this, and how can I get it to render in 3.3 as well as it does in 2.0?
When I set GLFW_OPENGL_PROFILE to GLFW_OPENGL_COMPAT_PROFILE it works in 3.3 just like in 2.0, but with GLFW_OPENGL_CORE_PROFILE (which is what I want to use) it doesn't. How do I fix it so it works with the core profile?
(I suspect GLFW is using some sort of deprecated function somewhere, but I'm using GLFW 3, so shouldn't it avoid deprecated functions entirely?)
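For reference, the 3.3 core-profile variant I'm describing only changes the window hints from the 2.0 values above to something like this (with GLFW_OPENGL_COMPAT_PROFILE substituted when testing the compatibility profile):

    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);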

Here is what worked for me: I have an Intel UHD Graphics 620 GPU, and updating my driver fixed it.
The old driver version was 24.something; the new driver version is 30.0.101.1660.
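To double-check which driver and GL version the context actually picked up, you can print the GL strings right after loading the function pointers; a small sketch using calls that already appear elsewhere in this thread:

    printf("Vendor:   %s\n", (const char *)glGetString(GL_VENDOR));
    printf("Renderer: %s\n", (const char *)glGetString(GL_RENDERER));
    printf("Version:  %s\n", (const char *)glGetString(GL_VERSION));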

Related

OpenGL 3.1 with SDL2 context fails to output anything on screen

I am trying to get a simple demo of OpenGL working with SDL2. I am using macOS Big Sur 11.3.1, my SDL version is 2.0.16, and the OpenGL version I am requesting is 3.1.
Everything seems to work fine at first, given that SDL is initialized successfully, the SDL window is not null, and the OpenGL context is not null either. But when trying to print out the vendor name, renderer name, and version name of OpenGL, all of the strings are null:
vendor = (null)
renderer = (null)
version = (null)
This also explains why nothing appears on the window. I have followed plenty of tutorials that go over using SDL2 with OpenGL, but none of them have worked for me.
I am compiling like this: clang -O3 -lSDL2 -lGL gl_sdl.c.
If anyone knows what is going on, please let me know; I am very confused. My code is below.
#include <SDL2/SDL.h>
#include <GL/gl.h>
#include <stdio.h>

enum {w = 800, h = 600};

#define FAIL(msg) {fprintf(stderr, "Could not " msg "\n"); return 1;}

// https://www.khronos.org/opengl/wiki/Tutorial1:_Creating_a_Cross_Platform_OpenGL_3.2_Context_in_SDL_(C_/_SDL)
// clang -O3 -lSDL2 -lGL gl_sdl.c && ./a.out

int main(void) {
    if (SDL_Init(SDL_INIT_VIDEO) < 0) FAIL("initialize SDL");

    SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_CORE);
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 1);

    // Turn on double buffering with a 24bit Z buffer. You may need to change this to 16 or 32 for your system
    SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
    SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 24);

    SDL_Window* const window = SDL_CreateWindow("gl_sdl", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, w, h, SDL_WINDOW_OPENGL);
    if (window == NULL) FAIL("create a window");

    SDL_GLContext context = SDL_GL_CreateContext(window);
    if (context == NULL) FAIL("create a context");

    // This makes our buffer swap synchronized with the monitor's vertical refresh
    SDL_GL_SetSwapInterval(1);

    printf("vendor = %s\nrenderer = %s\nversion = %s\n", glGetString(GL_VENDOR), glGetString(GL_RENDERER), glGetString(GL_VERSION));

    SDL_Event event;
    while (1) {
        while (SDL_PollEvent(&event)) {
            if (event.type == SDL_QUIT) {
                SDL_GL_DeleteContext(context);
                SDL_DestroyWindow(window);
                SDL_Quit();
                return 0;
            }
        }

        // Blue
        glClearColor(0.0f, 0.0f, 1.0f, 1.0f);
        glClear(GL_COLOR_BUFFER_BIT);
        SDL_GL_SwapWindow(window);
        SDL_Delay(20);
    }
}
macOS uses a different linker flag for OpenGL than Linux and Windows. Instead of -lGL, use -framework OpenGL, e.g.:
clang -O3 gl_sdl.c -lSDL2 -framework OpenGL
On a side note, the program requests a context for OpenGL 3.1 but does not use an OpenGL loading library (nor load any functions manually). As it stands, this happens to cause no problems, but only because no functions beyond OpenGL 1.1 are used.
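For illustration, manually loading a post-1.1 entry point through SDL would look roughly like this (a sketch; the glcorearb.h header with its PFNGLGENBUFFERSPROC typedef is assumed to be available, and the wrapper function is mine):

    #include <SDL2/SDL.h>
    #include <GL/glcorearb.h> /* assumed available; provides the PFN... function-pointer typedefs */
    #include <stdio.h>

    static PFNGLGENBUFFERSPROC my_glGenBuffers;

    /* Call only after SDL_GL_CreateContext() has returned a valid context. */
    static int load_gl_functions(void)
    {
        my_glGenBuffers = (PFNGLGENBUFFERSPROC)SDL_GL_GetProcAddress("glGenBuffers");
        if (my_glGenBuffers == NULL) {
            fprintf(stderr, "could not load glGenBuffers\n");
            return 0;
        }
        return 1;
    }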

OpenGL+GLFW glGenVertexArrays returns GL_INVALID_OPERATION

In trying to move to "modern" OpenGL (basically 3.2+), I've run into some trouble running basic code (derived from two tutorials) using GLFW, GLEW, and OpenGL.
My first problem is that with the code below:
#define GLEW_STATIC
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <stdlib.h>
#include <stdio.h>

const GLchar* vertexSource =
    "#version 150 core\n"
    "in vec2 position;"
    "void main()"
    "{"
    "    gl_Position = vec4(position, 0.0, 1.0);"
    "}";
const GLchar* fragmentSource =
    "#version 150 core\n"
    "out vec4 outColor;"
    "void main()"
    "{"
    "    outColor = vec4(1.0, 1.0, 1.0, 1.0);"
    "}";

void checkErr(const char* msg) {
    GLenum err = glGetError();
    if (err != 0) {
        printf("# \"%s\": %d\n", msg, err);
        exit(EXIT_FAILURE);
    } else {
        printf("# \"%s\": successful\n", msg);
    }
}

int main(int argc, char* argv[]) {
    GLFWwindow* window;

    // Initialize GLFW
    if (!glfwInit())
        return -1;

    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
    glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);

    // Create a windowed mode window and its OpenGL context
    window = glfwCreateWindow(640, 480, "Hello World", NULL, NULL);
    if (!window)
    {
        glfwTerminate();
        return -1;
    }

    // Make the window's context current
    glfwMakeContextCurrent(window);

    // Initialize GLEW
    glewExperimental = GL_TRUE;
    glewInit();

    // get version info
    const GLubyte* renderer = glGetString(GL_RENDERER);
    const GLubyte* version = glGetString(GL_VERSION);
    const GLubyte* glslVersion = glGetString(GL_SHADING_LANGUAGE_VERSION);
    printf("Renderer: %s\n", renderer);
    printf("OpenGL version: %s\n", version);
    printf("GLSL version: %s\n", glslVersion);

    // Create Vertex Array Object
    GLuint vao;
    glGenVertexArrays(1, &vao);
    checkErr("Gen VAO");
    glBindVertexArray(vao);
    checkErr("Bind VAO");

    // Create a Vertex Buffer Object and copy the vertex data to it
    GLuint vbo;
    glGenBuffers(1, &vbo);
    checkErr("Gen VBO");

    GLfloat vertices[] = {
         0.0f,  0.5f,
         0.5f, -0.5f,
        -0.5f, -0.5f
    };

    glBindBuffer(GL_ARRAY_BUFFER, vbo);
    checkErr("Bind VBO");
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
    checkErr("VBO data");

    // Create and compile the vertex shader
    GLuint vertexShader = glCreateShader(GL_VERTEX_SHADER);
    glShaderSource(vertexShader, 1, &vertexSource, NULL);
    glCompileShader(vertexShader);
    checkErr("Compile vert shader");

    // Create and compile the fragment shader
    GLuint fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
    glShaderSource(fragmentShader, 1, &fragmentSource, NULL);
    glCompileShader(fragmentShader);
    checkErr("Compile frag shader");

    // Link the vertex and fragment shader into a shader program
    GLuint shaderProgram = glCreateProgram();
    glAttachShader(shaderProgram, vertexShader);
    glAttachShader(shaderProgram, fragmentShader);
    glBindFragDataLocation(shaderProgram, 0, "outColor");
    glLinkProgram(shaderProgram);
    checkErr("Link program");
    glUseProgram(shaderProgram);
    checkErr("Use program");

    // Specify the layout of the vertex data
    GLint posAttrib = glGetAttribLocation(shaderProgram, "position");
    glEnableVertexAttribArray(posAttrib);
    checkErr("Enable vertex attrib");
    glVertexAttribPointer(posAttrib, 2, GL_FLOAT, GL_FALSE, 0, 0);
    checkErr("Describe vert data");

    // Loop until the user closes the window
    while (!glfwWindowShouldClose(window))
    {
        /* Render here */
        glClearColor(0.0, 0.0, 0.0, 1.0);
        glClear(GL_COLOR_BUFFER_BIT);
        glDrawArrays(GL_TRIANGLES, 0, 3);

        /* Swap front and back buffers */
        glfwSwapBuffers(window);

        /* Poll for and process events */
        glfwPollEvents();
    }

    glfwTerminate();
    exit(EXIT_SUCCESS);
}
I'm immediately running into GL_INVALID_OPERATION errors on the very first step of setting up the vertex array object.
I've done a fair bit of research regarding OS X's flimsy OpenGL support, but so far most of the things I've modified in this code have done nothing more than produce a completely black screen (that is, once I removed the exit-on-error behavior of the checkErr helper function).
For reference, I'm running on an early 2015 MacBook Pro w/Retina, OS X 10.11.3, and the version info printed by the program above is:
Renderer: Intel(R) Iris(TM) Graphics 6100
OpenGL version: 4.1 INTEL-10.12.13
GLSL version: 4.10
Any help is greatly appreciated!
You just assumed that your error was generated by glGenVertexArrays, but that is not the case: it is generated by glewInit. This is because GLEW is broken on core-profile OpenGL: it uses glGetString(GL_EXTENSIONS) to query the extension string, which is not available in core profiles and generates a GL_INVALID_ENUM error (1280).
Normally, glewInit would then abort with return code GL_FALSE. However, the "workaround" of setting glewExperimental = GL_TRUE makes it carry on, ignoring the error and querying all the extension function pointers anyway. This is broken in at least three different regards:
1. All the GLEW variables for querying the availability of specific extensions report false even when the extension is available.
2. It retrieves function pointers for extension functions whose availability hasn't been advertised by the implementation. Those pointers are not guaranteed to be NULL, and calling them is undefined behavior. Together with point 1, this means you have no way of checking the availability of any extension, except by manually doing the work GLEW is supposed to do for you.
3. It leaves the GL context in an error state.
As a quick and dirty hack, you can add a glGetError() call right after glewInit to read the error away. After I did that, your code produced the expected white triangle on my implementation (NVIDIA/Linux).
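In code, that amounts to a single extra call after the GLEW setup lines from the program above (the comment is mine):

    glewExperimental = GL_TRUE;
    glewInit();
    glGetError(); // read away the GL_INVALID_ENUM that glewInit() left behind on a core profile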
A better fix is probably to switch to another GL loader that works properly with core profiles, for example glad. Switching over is not hard, since only the init function has to be replaced. Note that glad is not a loader library but a Python script that generates a loader source file tailored to your needs, so you don't link another library; you just add another source file to your project.
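With a glad loader generated for a core profile, the initialization would replace the glewInit() call and look roughly like this (a sketch; the error message is mine, and gladLoadGLLoader/glfwGetProcAddress are the same calls used in the first program in this thread):

    #include <glad/glad.h> // generated by glad; must be included before other GL headers

    // ... after glfwMakeContextCurrent(window):
    if (!gladLoadGLLoader((GLADloadproc)glfwGetProcAddress)) {
        fprintf(stderr, "Failed to initialize glad\n");
        return -1;
    }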

Ubuntu 15.04 Mesa and SDL implementation

It's been a while since I've needed to ask a question.
I'm trying out SDL2 and OpenGL (3.3, which is the compatibility limit with Mesa) because GLSL really interests me. However, on my work machine I learned very quickly that it isn't easy to get things working. Every tutorial I've used, and even the Mesa demos themselves, use headers that don't come with Ubuntu's base GL library. I've already given in and installed GLEW, but it doesn't feel right to keep adding libraries just to make things work. The GL headers I have are:
glcorearb.h, glew.h, glext.h, gl.h, gl_mangle.h, glu.h, glu_mangle.h,
glxew.h, glxext.h, glx.h, glxint.h, glx_mangle.h, glxmd.h, glxproto.h,
glxtokens.h, wglew.h
I tried following LazyFoo's tutorials but did not get the white quad to appear. I then followed opengl-tutorial's tutorial and did not get the white triangle either (it says not to worry if you don't see it at first, but doesn't explain what to do if it never shows up). I tried to continue with the rest of the tutorial, but I'm writing this in C rather than C++, so I'm worried about straying too far from the tutorials and confusing the issue further. I've installed SDL2 and made sure I have everything. This is the code in my current SDL2/GL program; it does not reveal a white triangle at all. It's a combination of tutorials, but I've read all the SDL API material I could to make sure nothing on the SDL side interferes with what GL tries to do.
#define SDL_ASSERT_LEVEL 3
#include <stdio.h>
#include <stdlib.h>
#include <stdbool.h>
#include <SDL2/SDL.h>
#include <SDL2/SDL_error.h>
#include <SDL2/SDL_assert.h>
#include <SDL2/SDL_version.h>
#include <SDL2/SDL_events.h>
#include <GL/gl.h>

int main(){
    SDL_version compiledWith, linkedWith;
    SDL_VERSION(&compiledWith);
    SDL_GetVersion(&linkedWith);

    if(SDL_Init(SDL_INIT_VIDEO|SDL_INIT_AUDIO) < 0){
        fprintf(stderr, "\nUnable to initialize SDL: %s\n", SDL_GetError());
        exit(1);
    }

    SDL_Log("\nCompiled with: %d.%d.%d\n", compiledWith.major,
            compiledWith.minor, compiledWith.patch);
    SDL_Log("\nLinked with: %d.%d.%d\n", linkedWith.major,
            linkedWith.minor, linkedWith.patch);

    SDL_Window* window = SDL_CreateWindow("SDL2/OpenGL Demo", 0, 0, 640, 480,
                                          SDL_WINDOW_OPENGL|SDL_WINDOW_RESIZABLE);

    //Can now make GL calls after the below line
    SDL_GLContext glContext = SDL_GL_CreateContext(window);

    GLuint vertexArrayID;
    glGenVertexArrays(1, &vertexArrayID);
    glBindVertexArray(vertexArrayID);

    static const GLfloat gVertexBufferData[] = {
        -1.0f, -1.0f, 0.0f,
         1.0f, -1.0f, 0.0f,
         0.0f,  1.0f, 0.0f,
    };

    GLuint vertexBuffer;
    glGenBuffers(1, &vertexBuffer);
    glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
    glBufferData(GL_ARRAY_BUFFER, sizeof(gVertexBufferData),
                 gVertexBufferData, GL_STATIC_DRAW);

    glEnableVertexAttribArray(0);
    glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
    glVertexAttribPointer(
        0,
        3,
        GL_FLOAT,
        GL_FALSE,
        0,
        (void*)0
    );
    glDrawArrays(GL_TRIANGLES, 0, 3);
    glDisableVertexAttribArray(0);

    glClearColor(0,0,0,1);
    glClear(GL_COLOR_BUFFER_BIT);
    SDL_GL_SwapWindow(window);

    SDL_Event theEvent;
    bool running = true;
    while(running){
        while(SDL_PollEvent(&theEvent)){
            switch(theEvent.type){
                case SDL_QUIT:
                    SDL_Log("\nQuit request acknowledged\n");
                    //Finish up GL usage
                    SDL_GL_DeleteContext(glContext);
                    //Finish up SDL usage
                    SDL_Quit();
                    running = false;
                    break;
                default:
                    break;
            }
        }
    }
    return 0;
}
I use gcc main.c -lSDL2 -lGL -o test to build. I suspect I might be missing some libraries at link time, but I'm not sure how to check; the compiler doesn't complain about anything it can't find unless I follow a tutorial that uses something I don't have.
To conclude, since this is a longer post than anticipated, the questions are:
Am I missing any important library to actually get this to work on my system (Ubuntu 15.04 Intel Haswell Mobile x86/MMX/SSE2)?
Did I miss something in my code that is necessary to see the white triangle?

OpenGL 3.3 + OSX + GLFW Does not work [duplicate]

This question already has answers here:
How do I create an OpenGL 3.3 context in GLFW 3
(1 answer)
Simple triangle using OpenGL and GLFW [duplicate]
(1 answer)
Closed 8 years ago.
I'm following the http://opengl-tutorials.org tutorial series, and came up with some very basic code for drawing a white triangle on screen, but I can't seem to get it to work.
I've compiled glfw without any additional options using appleshell's answer here: OpenGL 3.3/4.1 on Mac OSX 10.9 using GLFW library
I created a simple source code based on the site instructions:
#include <stdio.h>
#include <stdlib.h>
#include <glfw3.h>

// Need gl3.h for glGenVertexArrays() and glBindVertexArray()
#include <OpenGL/gl3.h>

// An array of 3 vectors which represents 3 vertices
static const GLfloat g_vertex_buffer_data[] = {
    -1.0f, -1.0f, 0.0f,
     1.0f, -1.0f, 0.0f,
     0.0f,  1.0f, 0.0f,
};

#define printGlErr(func) err = glGetError(); if (err) fprintf(stderr, func " error: %u at line %d\n", err, __LINE__);

GLuint err;

int main(int argc, const char * argv[]) {
    if (!glfwInit()) {
        fprintf(stderr, "Failed to initialize GLFW\n");
        return -1;
    }

    glfwWindowHint(GLFW_SAMPLES, 4);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
    glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE); // To make MacOS happy; should not be needed
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE); // We don't want the old OpenGL

    // Open a window and create its OpenGL context
    GLFWwindow* window;
    window = glfwCreateWindow(1024, 768, "Triangle", NULL, NULL);
    if (window == NULL) {
        fprintf(stderr, "Failed to open GLFW window.\n");
        glfwTerminate();
        return -1;
    }
    glfwMakeContextCurrent(window);

    // Ensure we can capture the escape key being pressed below
    glfwSetInputMode(window, GLFW_STICKY_KEYS, GL_TRUE);

    // Init stuff
    GLuint VertexArrayID;
    glGenVertexArrays(1, &VertexArrayID);
    glBindVertexArray(VertexArrayID);

    // This will identify our vertex buffer
    GLuint vertexbuffer;
    // Generate 1 buffer, put the resulting identifier in vertexbuffer
    glGenBuffers(1, &vertexbuffer);
    printGlErr("glGenBuffers()");
    // The following commands will talk about our 'vertexbuffer' buffer
    glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
    printGlErr("glBindBuffer()");
    // Give our vertices to OpenGL.
    glBufferData(GL_ARRAY_BUFFER, sizeof(g_vertex_buffer_data), g_vertex_buffer_data, GL_STATIC_DRAW);
    printGlErr("glBufferData()");

    do {
        // Draw stuff
        glEnableVertexAttribArray(0);
        glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
        glVertexAttribPointer(
            0,        // attribute 0. No particular reason for 0, but must match the layout in the shader.
            3,        // size
            GL_FLOAT, // type
            GL_FALSE, // normalized?
            0,        // stride
            (void*)0  // array buffer offset
        );
        printGlErr("glVertexAttribPointer()");
        glDrawArrays(GL_TRIANGLES, 0, 3);
        printGlErr("glDrawArrays()");
        glDisableVertexAttribArray(0);
        printGlErr("glDisableVertexAttribArray()");

        // Swap buffers
        glfwSwapBuffers(window);
        glfwPollEvents();
    } while (glfwGetKey(window, GLFW_KEY_ESCAPE) != GLFW_PRESS && glfwWindowShouldClose(window) == 0); // Check if the ESC key was pressed or the window was closed

    printf("Goodbye!\n");
    return 0;
}
I can't seem to get the code to work. I'm getting a GL_INVALID_OPERATION error on the glDrawArrays() call.
The code renders a black window on both Mavericks and Yosemite. These are my hardware specs: http://support.apple.com/kb/SP653
It does not crash; it exits on ESC, but nothing else happens (other than spamming the error code on the console). The results are the same with either of the two available video boards.
Also, I'm compiling the source code with the following line in the terminal:
gcc main.c -o triangle -framework OpenGl -framework Cocoa -framework IOKit -framework CoreVideo -I../glfw/include/GLFW -L../glfw/src -lglfw3

GLFW3 Error - "WGL: Failed to find a suitable pixel format"

I'm trying to get GLFW3 working on my Windows 8 machine with MinGW, but with little success. I have this test program, copied straight from the GLFW documentation (with a bit of editing to make it work with the shared library), that I'm trying to run:
#define GLFW_DLL
#include <GLFW/glfw3.h>
#include <stdlib.h>
#include <stdio.h>

static void error_callback(int error, const char* description)
{
    fputs(description, stderr);
}

static void key_callback(GLFWwindow* window, int key, int scancode, int action, int mods)
{
    if (key == GLFW_KEY_ESCAPE && action == GLFW_PRESS)
        glfwSetWindowShouldClose(window, GL_TRUE);
}

int main(void)
{
    GLFWwindow* window;
    glfwSetErrorCallback(error_callback);
    if (!glfwInit())
        exit(EXIT_FAILURE);
    window = glfwCreateWindow(640, 480, "Simple example", NULL, NULL);
    if (!window)
    {
        glfwTerminate();
        exit(EXIT_FAILURE);
    }
    glfwMakeContextCurrent(window);
    glfwSetKeyCallback(window, key_callback);
    while (!glfwWindowShouldClose(window))
    {
        float ratio;
        int width, height;
        glfwGetFramebufferSize(window, &width, &height);
        ratio = width / (float) height;
        glViewport(0, 0, width, height);
        glClear(GL_COLOR_BUFFER_BIT);
        glMatrixMode(GL_PROJECTION);
        glLoadIdentity();
        glOrtho(-ratio, ratio, -1.f, 1.f, 1.f, -1.f);
        glMatrixMode(GL_MODELVIEW);
        glLoadIdentity();
        glRotatef((float) glfwGetTime() * 50.f, 0.f, 0.f, 1.f);
        glBegin(GL_TRIANGLES);
        glColor3f(1.f, 0.f, 0.f);
        glVertex3f(-0.6f, -0.4f, 0.f);
        glColor3f(0.f, 1.f, 0.f);
        glVertex3f(0.6f, -0.4f, 0.f);
        glColor3f(0.f, 0.f, 1.f);
        glVertex3f(0.f, 0.6f, 0.f);
        glEnd();
        glfwSwapBuffers(window);
        glfwPollEvents();
    }
    glfwDestroyWindow(window);
    glfwTerminate();
    exit(EXIT_SUCCESS);
}
Like I said, I compiled GLFW as a shared library, and I have the glfw3.dll file in the same folder as the rest of my code. It compiles fine, but when I run the program I get a "WGL: Failed to find a suitable pixel format" error. Apparently this can be related to video card drivers, but I've updated mine and I still get the same error.
It might help to know that I have an Nvidia 720M and an Intel HD Graphics 4000 card, with Optimus switching between the two. I tried running the program with both of them and got the same error.
EDIT: I should probably also show you how I'm building it:
gl-test: main.c
	gcc -o gl-test main.c -lopengl32 -lglfw3dll
It turns out my problem didn't have anything to do with my drivers. Instead, I was simply building GLFW3 incorrectly.
My suggestion to those facing the same problem: remember to actually build GLFW as a shared library if you want to use it that way! The CMake variable is called BUILD_SHARED_LIBS. If you're using cmake-gui, changing it is simple (just make sure to press Configure again to save the change, which is what I forgot to do). Otherwise, you need to add -DBUILD_SHARED_LIBS=on when running cmake in the terminal.
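For example, a from-scratch command-line build of the shared library might look like this (a sketch, assuming an out-of-source build directory and the default generator for your toolchain):
mkdir build && cd build
cmake -DBUILD_SHARED_LIBS=ON ..
cmake --build .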
