I am trying to get a simple demo of OpenGL working with SDL2. I am using macOS Big Sur 11.3.1, my SDL version is 2.0.16, and the OpenGL version I am requesting is 3.1.
Everything seems to work at first: SDL initializes successfully, the SDL window is not null, and neither is the OpenGL context. But when I try to print the vendor, renderer, and version strings of OpenGL, all of them are null:
vendor = (null)
renderer = (null)
version = (null)
This would also explain why nothing appears in the window. I have followed plenty of tutorials on using SDL2 with OpenGL, but none of them have worked for me.
I am compiling like this: clang -O3 -lSDL2 -lGL gl_sdl.c.
If anyone knows what is going on, please let me know; I am very confused. My code is below.
#include <SDL2/SDL.h>
#include <GL/gl.h>
#include <stdio.h>

enum {w = 800, h = 600};

#define FAIL(msg) {fprintf(stderr, "Could not " msg "\n"); return 1;}

// https://www.khronos.org/opengl/wiki/Tutorial1:_Creating_a_Cross_Platform_OpenGL_3.2_Context_in_SDL_(C_/_SDL)
// clang -O3 -lSDL2 -lGL gl_sdl.c && ./a.out
int main(void) {
    if (SDL_Init(SDL_INIT_VIDEO) < 0) FAIL("initialize SDL");

    SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_CORE);
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 1);

    // Turn on double buffering with a 24-bit Z buffer. You may need to change this to 16 or 32 for your system.
    SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
    SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 24);

    SDL_Window* const window = SDL_CreateWindow("gl_sdl", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, w, h, SDL_WINDOW_OPENGL);
    if (window == NULL) FAIL("create a window");

    SDL_GLContext context = SDL_GL_CreateContext(window);
    if (context == NULL) FAIL("create a context");

    // This makes our buffer swap synchronized with the monitor's vertical refresh.
    SDL_GL_SetSwapInterval(1);

    printf("vendor = %s\nrenderer = %s\nversion = %s\n",
           glGetString(GL_VENDOR), glGetString(GL_RENDERER), glGetString(GL_VERSION));

    SDL_Event event;
    while (1) {
        while (SDL_PollEvent(&event)) {
            if (event.type == SDL_QUIT) {
                SDL_GL_DeleteContext(context);
                SDL_DestroyWindow(window);
                SDL_Quit();
                return 0;
            }
        }

        // Blue
        glClearColor(0.0f, 0.0f, 1.0f, 1.0f);
        glClear(GL_COLOR_BUFFER_BIT);
        SDL_GL_SwapWindow(window);
        SDL_Delay(20);
    }
}
macOS uses a different linker flag for OpenGL than Linux and Windows do. Instead of -lGL, use -framework OpenGL, e.g.
clang -O3 gl_sdl.c -lSDL2 -framework OpenGL
On a side note, the program requests a context for OpenGL 3.1 but does not use an OpenGL Loading Library (nor load any functions manually). As is, this happens to cause no problems, but only because no functions beyond OpenGL 1.1 are used.
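For illustration, a minimal sketch of loading a single post-1.1 function manually through SDL (my_glGenBuffers and load_gl_functions are hypothetical names, and this assumes a GL context is already current); an OpenGL Loading Library just automates this for every entry point:

#include <SDL2/SDL.h>
#include <SDL2/SDL_opengl.h> // provides function-pointer typedefs such as PFNGLGENBUFFERSPROC

static PFNGLGENBUFFERSPROC my_glGenBuffers; // hypothetical holder for the loaded entry point

static int load_gl_functions(void) {
    // Ask the driver for the entry point; NULL means it is unavailable
    my_glGenBuffers = (PFNGLGENBUFFERSPROC)SDL_GL_GetProcAddress("glGenBuffers");
    return my_glGenBuffers != NULL;
}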
I have the following code:
#define GLFW_INCLUDE_NONE // must be defined before including GLFW's header
#include <glad/glad.h>
#include <GLFW/glfw3.h>
#include <stdio.h>
#include <stdlib.h>

static void error_callback(int error, const char *description)
{
    printf("Error: %s\n", description);
}

int main()
{
    glfwSetErrorCallback(error_callback); // set before glfwInit() so init errors are reported
    glfwInit();

    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 2);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 0);

    GLFWwindow *window = glfwCreateWindow(800, 600, "Learn OpenGL", NULL, NULL);
    glfwMakeContextCurrent(window);

    if (!gladLoadGLLoader((GLADloadproc)glfwGetProcAddress))
    {
        printf("Failed to initialize GLAD. Fatal error.\n");
        return -1;
    }

    glfwSwapInterval(1);

    while (!glfwWindowShouldClose(window))
    {
        int width, height;
        glfwGetFramebufferSize(window, &width, &height);
        glViewport(0, 0, width, height);

        glClearColor(0.0f, 0.5f, 1.0f, 1.0f);
        glClear(GL_COLOR_BUFFER_BIT);

        glfwSwapBuffers(window);
        glfwPollEvents();
    }

    glfwDestroyWindow(window);
    glfwTerminate();
    return 0;
}
When the code is set up as above, with the GLFW context version set to 2.0, it works well: when I resize the window, it fills in the newly exposed area as soon as I stop resizing. However, when I set the version to 3.3 (the version I actually want to use), it lags in spreading the blue across the whole window, even after I have stopped resizing, as can be seen here: (I'm running Windows, by the way)
This does not happen when the version is 2.0. Why is this, and how can I get 3.3 to render as promptly as 2.0 does?
When I set GLFW_OPENGL_PROFILE to GLFW_OPENGL_COMPAT_PROFILE, 3.3 works just like 2.0, but with GLFW_OPENGL_CORE_PROFILE (which is what I want to use) it doesn't. How do I fix this so it also works in the core profile?
(I suspect GLFW is using some sort of deprecated function somewhere, but I don't know. I'm using GLFW 3, so shouldn't it be free of deprecated functions?)
Here is what worked for me: I have an Intel UHD Graphics 620 GPU, and updating its driver fixed the problem.
The old driver version was 24.something; the new driver version is 30.0.101.1660.
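As a side note, separate from the driver fix: on Windows, glfwPollEvents() blocks inside the OS's modal resize loop, so a common way to keep the window painted while the border is being dragged is to redraw from GLFW's window refresh callback. A minimal sketch against the code above (draw_frame is a hypothetical helper wrapping the loop body):

// Hypothetical helper: the viewport/clear/swap body of the main loop above
static void draw_frame(GLFWwindow *window)
{
    int width, height;
    glfwGetFramebufferSize(window, &width, &height);
    glViewport(0, 0, width, height);
    glClearColor(0.0f, 0.5f, 1.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);
    glfwSwapBuffers(window);
}

// Called by GLFW whenever the window contents need repainting, including during a resize
static void refresh_callback(GLFWwindow *window)
{
    draw_frame(window);
}

// in main(), after glfwMakeContextCurrent(window):
// glfwSetWindowRefreshCallback(window, refresh_callback);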
I have a problem when I run my program: "prog.exe just stopped working".
#include <SDL.h>
#include <stdlib.h>
#include <stdio.h>

int main(int argc, char* argv[])
{
    SDL_Surface *screen; // even with SDL2, we can still bring ancient code back
    SDL_Window *window;
    SDL_Surface *image;

    SDL_Init(SDL_INIT_VIDEO); // init video

    // create the window like normal
    window = SDL_CreateWindow("SDL2 Example", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, 640, 480, 0);

    // but instead of creating a renderer, we can draw directly to the screen
    screen = SDL_GetWindowSurface(window);

    // let's just show some classic code for reference
    SDL_FillRect(image, NULL, SDL_MapRGB(image->format, 0, 255, 0));
    SDL_BlitSurface(image, NULL, screen, NULL); // blit it to the screen
    SDL_FreeSurface(image);

    // this works just like SDL_Flip() in SDL 1.2
    SDL_UpdateWindowSurface(window);

    // show image for 2 seconds
    SDL_Delay(2000);

    SDL_DestroyWindow(window);
    SDL_Quit();
    return 0;
}

// gcc src/main.c -o bin/prog -I include -L lib -lmingw32 -lSDL2main -lSDL2
As Eugene Sh. pointed out, your surface isn't initialized.
You need to create the surface somehow, either by loading an image or by using SDL_CreateRGBSurface. Add this before calling SDL_FillRect, and your code will show a green screen:
image = SDL_CreateRGBSurface(0, 640, 480, 32, 0, 0, 0, 0); // 640x480 matches the window
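For completeness, a hedged sketch of both options with error checking ("image.bmp" is a hypothetical path):

image = SDL_CreateRGBSurface(0, 640, 480, 32, 0, 0, 0, 0);
// or load a bitmap from disk instead:
// image = SDL_LoadBMP("image.bmp");
if (image == NULL) {
    fprintf(stderr, "Could not create surface: %s\n", SDL_GetError());
    return 1;
}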
It's been a while since I've needed to ask a question.
I'm trying out SDL2 and OpenGL (3.3, which is the compatibility limit with Mesa) because GLSL really interests me. However, on my work machine I learned very quickly that it isn't easy to get things to work. Every tutorial I've used, and even the Mesa demos themselves, use headers that don't come with Ubuntu's base GL library. I've already given in and installed GLEW, but it doesn't feel right to keep adding libraries to make things work. The GL headers I have are:
glcorearb.h, glew.h, glext.h, gl.h, gl_mangle.h, glu.h, glu_mangle.h,
glxew.h, glxext.h, glx.h, glxint.h, glx_mangle.h, glxmd.h, glxproto.h,
glxtokens.h, wglew.h
I tried following LazyFoo's tutorials but did not get the same result of a white quad appearing. I then followed opengl-tutorial's tutorial and did not get its result of a white triangle either; it says not to worry if you don't see it at first, but doesn't explain what to do if it never appears. I tried to follow the rest of the tutorial, but I'm writing this in C rather than C++, so I'm worried about straying too far from the tutorial and confusing the issue further. I've installed SDL2 and made sure I have everything. Below is the code in my current SDL2/GL program; it does not reveal a white triangle at all. It's a combination of tutorials, but I've read all the SDL API material I could to make sure nothing on the SDL side interferes with what GL tries to do.
#define SDL_ASSERT_LEVEL 3

#include <stdio.h>
#include <stdlib.h>
#include <stdbool.h>
#include <SDL2/SDL.h>
#include <SDL2/SDL_error.h>
#include <SDL2/SDL_assert.h>
#include <SDL2/SDL_version.h>
#include <SDL2/SDL_events.h>
#include <GL/gl.h>

int main(){
    SDL_version compiledWith, linkedWith;
    SDL_VERSION(&compiledWith);
    SDL_GetVersion(&linkedWith);

    if(SDL_Init(SDL_INIT_VIDEO|SDL_INIT_AUDIO) < 0){
        fprintf(stderr, "\nUnable to initialize SDL: %s\n", SDL_GetError());
        exit(1);
    }

    SDL_Log("\nCompiled with: %d.%d.%d\n", compiledWith.major,
            compiledWith.minor, compiledWith.patch);
    SDL_Log("\nLinked with: %d.%d.%d\n", linkedWith.major,
            linkedWith.minor, linkedWith.patch);

    SDL_Window* window = SDL_CreateWindow("SDL2/OpenGL Demo", 0, 0, 640, 480,
                                          SDL_WINDOW_OPENGL|SDL_WINDOW_RESIZABLE);

    //Can now make GL calls after the below line
    SDL_GLContext glContext = SDL_GL_CreateContext(window);

    GLuint vertexArrayID;
    glGenVertexArrays(1, &vertexArrayID);
    glBindVertexArray(vertexArrayID);

    static const GLfloat gVertexBufferData[] = {
        -1.0f, -1.0f, 0.0f,
         1.0f, -1.0f, 0.0f,
         0.0f,  1.0f, 0.0f,
    };

    GLuint vertexBuffer;
    glGenBuffers(1, &vertexBuffer);
    glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
    glBufferData(GL_ARRAY_BUFFER, sizeof(gVertexBufferData),
                 gVertexBufferData, GL_STATIC_DRAW);

    glEnableVertexAttribArray(0);
    glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
    glVertexAttribPointer(
        0,        // attribute index
        3,        // size
        GL_FLOAT, // type
        GL_FALSE, // normalized?
        0,        // stride
        (void*)0  // array buffer offset
    );
    glDrawArrays(GL_TRIANGLES, 0, 3);
    glDisableVertexAttribArray(0);

    glClearColor(0,0,0,1);
    glClear(GL_COLOR_BUFFER_BIT);
    SDL_GL_SwapWindow(window);

    SDL_Event theEvent;
    bool running = true;
    while(running){
        while(SDL_PollEvent(&theEvent)){
            switch(theEvent.type){
                case SDL_QUIT:
                    SDL_Log("\nQuit request acknowledged\n");
                    //Finish up GL usage
                    SDL_GL_DeleteContext(glContext);
                    //Finish up SDL usage
                    SDL_Quit();
                    running = false;
                    break;
                default:
                    break;
            }
        }
    }
    return 0;
}
I use gcc main.c -lSDL2 -lGL -o test to compile and link. I suspect I might be missing some libraries, but I'm not sure where to check; the compiler doesn't warn me about anything it can't find unless I follow a tutorial that uses something I don't have (one quick check is sketched below).
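One quick sanity check, assuming the standard Ubuntu development packages (libsdl2-dev, libgl1-mesa-dev, libglew-dev) are installed, is to ask the packages themselves which flags they expect and compare against your command:

sdl2-config --cflags --libs
pkg-config --cflags --libs sdl2 gl glew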
To conclude, since this is a longer post than anticipated, the questions are:
Am I missing any important library to actually get this to work on my system (Ubuntu 15.04 Intel Haswell Mobile x86/MMX/SSE2)?
Did I miss something in my code that is necessary to see the white triangle?
This OpenGL program is written to draw a triangle, but it crashes.
Basically, a set of triangle vertices is defined as an array named coordinates; this array is passed to a buffer, and glDrawArrays draws the triangle using the GL_TRIANGLES mode, starting from vertex 0, 3 vertices in total.
Am I right? And could someone show me where the error is? Here is the code:
// Open an OpenGL window
GLFWwindow* window;
int k = 0;

/****Step 1: define vertices in (x, y, z) form****/
const GLfloat coordinates[] = {
    -1.0f, -1.0f, 0.0f,
     1.0f, -1.0f, 0.0f,
     0.0f,  1.0f, 0.0f
};
/************************/

/**Step 2: send this triangle vertices to OpenGL through a buffer**/
GLuint vertexBuffer; // identify vertex buffer

void Render(void){
    /************************/
    glEnableVertexAttribArray(0);
    glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
    glVertexAttribPointer(0, 3 /*size*/, GL_FLOAT /*type*/, GL_FALSE, 0, (void*)0);
    glDrawArrays(GL_TRIANGLES, 0, 3);
    //glDisableVertexAttribArray(0);
    /************************/

    // glClearColor(0., 1., 1., 1.); // blue colour
    glClear( GL_COLOR_BUFFER_BIT );

    // Swap front and back rendering buffers
    glfwSwapBuffers(window);
    //Poll for and process events
    glfwPollEvents();
}

int main( void ) {
    /*Initializing steps here*/

    // Create a windowed mode window and its OpenGL context
    window = glfwCreateWindow(700, 500, "Hello World", NULL, NULL);
    // Make the window's context current
    glfwMakeContextCurrent(window);

    /**Step 2: send this triangle vertices to OpenGL through a buffer**/
    glGenBuffers(1, &vertexBuffer); // generating 1 buffer, put resulting identifier in this buffer
    glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
    glBufferData(GL_ARRAY_BUFFER, sizeof(coordinates), coordinates, GL_STATIC_DRAW);
    /************************/

    // Main loop
    while( glfwWindowShouldClose(window) == 0) {
        // OpenGL rendering goes here...
        Render();
    }

    // Close window and terminate GLFW
    glfwDestroyWindow(window);
    glfwTerminate();

    // Exit program
    exit( EXIT_SUCCESS );
}
EDIT
Here is a screenshot of the output:
UPDATE
I found the error: I was creating the OpenGL context after initializing GLEW. That causes the program to crash.
OK, first of all, you should have provided more details, but I figured them out myself. Here they are:
Missing headers:
#include <stdlib.h>
#include <stdio.h>
#include <GL/glew.h>
#include <GLFW/glfw3.h>
Makefile:
CFLAGS = -Wall -O2
CFLAGS += $(shell pkg-config --cflags glfw3)
CFLAGS += $(shell pkg-config --cflags glew)
LDFLAGS += $(shell pkg-config --libs glfw3)
LDFLAGS += $(shell pkg-config --libs glew)

SOURCES = main.c
OUT = gl-test
CC = gcc

# libraries go after the sources so the linker resolves symbols correctly
default: $(SOURCES)
	$(CC) $(CFLAGS) $(SOURCES) -o $(OUT) $(LDFLAGS)

clean:
	-rm -f $(OUT)

.PHONY: default clean
The error that occurred was a segmentation fault, in this call:
glGenBuffers(1, &vertexBuffer);
But what I see is that glfwCreateWindow() returns NULL, so the error is already in the very first instruction. You can figure out why this error happened like this:
static void error_callback(int error, const char *description)
{
    puts(description);
}

int main(void)
{
    glfwSetErrorCallback(error_callback);
    ...
}
It is also worth modifying the window creation code like this:
// Create a windowed mode window and its OpenGL context
window = glfwCreateWindow(700, 500, "Hello World", NULL, NULL);
if (window == NULL) {
    fprintf(stderr, "Error in glfwCreateWindow\n");
    return EXIT_FAILURE;
}
After this, I see the following output in my terminal:
The GLFW library is not initialized
Error in glfwCreateWindow
Please try to do the same and confirm that you have the same problem. If not, please provide the output from your terminal.
EDIT 1:
OK, it seems that first of all you need to do the following at the very beginning of main():
if (!glfwInit()) {
    fprintf(stderr, "Error on GLFW init\n");
    return EXIT_FAILURE;
}
That solves the previous error, but I still get a segmentation fault.
EDIT 2:
The second error is that GLEW wasn't initialized. GLEW initialization must happen after the window is created and its context is current, like this:
// Make the window's context current
glfwMakeContextCurrent(window);

GLenum glewinit_res = glewInit(); // note: the result variable must be declared
if (glewinit_res != GLEW_OK) {
    fprintf(stderr, "Glew init error: %s\n",
            glewGetErrorString(glewinit_res));
    return EXIT_FAILURE;
}
Now I don't have any runtime errors; the window shows up, but it only has a black background with nothing on it. Anyway, I believe the original error is fixed.
EDIT 3:
If you want to see your triangle, just remove the glClear() call: as written, Render() clears the colour buffer after drawing, which erases the freshly drawn triangle before the buffer swap.
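Alternatively, a sketch of Render() with the clear kept but moved before the draw call, so it no longer erases the frame that was just drawn:

void Render(void){
    glClear(GL_COLOR_BUFFER_BIT); // clear first...

    glEnableVertexAttribArray(0);
    glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (void*)0);
    glDrawArrays(GL_TRIANGLES, 0, 3); // ...then draw on the cleared background
    glDisableVertexAttribArray(0);

    glfwSwapBuffers(window);
    glfwPollEvents();
}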
Here is what I see:
Here is modified code that works for me (main.c file):
http://pastebin.com/sx5nMQHc
I'm following the http://opengl-tutorials.org tutorial series and came up with a very basic program for drawing a white triangle on screen, but I can't seem to get it to work.
I've compiled glfw without any additional options using appleshell's answer here: OpenGL 3.3/4.1 on Mac OSX 10.9 using GLFW library
I created a simple source code based on the site instructions:
#include <stdio.h>
#include <stdlib.h>
#include <glfw3.h>
// Need gl3.h for glGenVertexArrays() and glBindVertexArray()
#include <OpenGL/gl3.h>

// An array of 3 vectors which represents 3 vertices
static const GLfloat g_vertex_buffer_data[] = {
    -1.0f, -1.0f, 0.0f,
     1.0f, -1.0f, 0.0f,
     0.0f,  1.0f, 0.0f,
};

#define printGlErr(func) err = glGetError(); if (err) fprintf(stderr, func " error: %u at line %d\n", err, __LINE__);

GLuint err;

int main(int argc, const char * argv[]) {
    if (!glfwInit()) {
        fprintf( stderr, "Failed to initialize GLFW\n" );
        return -1;
    }

    glfwWindowHint(GLFW_SAMPLES, 4);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
    glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE); // To make MacOS happy; should not be needed
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE); // We don't want the old OpenGL

    // Open a window and create its OpenGL context
    GLFWwindow* window;
    window = glfwCreateWindow(1024, 768, "Triangle", NULL, NULL);
    if (window == NULL) {
        fprintf(stderr, "Failed to open GLFW window.\n");
        glfwTerminate();
        return -1;
    }
    glfwMakeContextCurrent(window);

    // Ensure we can capture the escape key being pressed below
    glfwSetInputMode(window, GLFW_STICKY_KEYS, GL_TRUE);

    // Init stuff
    GLuint VertexArrayID;
    glGenVertexArrays(1, &VertexArrayID);
    glBindVertexArray(VertexArrayID);

    // This will identify our vertex buffer
    GLuint vertexbuffer;
    // Generate 1 buffer, put the resulting identifier in vertexbuffer
    glGenBuffers(1, &vertexbuffer);
    printGlErr("glGenBuffers()");
    // The following commands will talk about our 'vertexbuffer' buffer
    glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
    printGlErr("glBindBuffer()");
    // Give our vertices to OpenGL.
    glBufferData(GL_ARRAY_BUFFER, sizeof(g_vertex_buffer_data), g_vertex_buffer_data, GL_STATIC_DRAW);
    printGlErr("glBufferData()");

    do {
        // Draw stuff
        glEnableVertexAttribArray(0);
        glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
        glVertexAttribPointer(
            0,        // attribute 0. No particular reason for 0, but must match the layout in the shader.
            3,        // size
            GL_FLOAT, // type
            GL_FALSE, // normalized?
            0,        // stride
            (void*)0  // array buffer offset
        );
        printGlErr("glVertexAttribPointer()");
        glDrawArrays(GL_TRIANGLES, 0, 3);
        printGlErr("glDrawArrays()");
        glDisableVertexAttribArray(0);
        printGlErr("glDisableVertexAttribArray()");

        // Swap buffers
        glfwSwapBuffers(window);
        glfwPollEvents();
    } while (glfwGetKey(window, GLFW_KEY_ESCAPE) != GLFW_PRESS && glfwWindowShouldClose(window) == 0); // Check if the ESC key was pressed or the window was closed

    printf("Goodbye!\n");
    return(0);
}
I can't seem to get the code to work; I'm getting GL_INVALID_OPERATION on the glDrawArrays() call.
The code renders a black window on both Mavericks and Yosemite. These are my hardware specs: http://support.apple.com/kb/SP653
It does not crash, and it exits on ESC, but nothing else happens (other than spamming the error code to the console). The results are the same with either of the two available video boards.
Also, I'm compiling the source code using the following line in the terminal:
gcc main.c -o triangle -framework OpenGL -framework Cocoa -framework IOKit -framework CoreVideo -I../glfw/include/GLFW -L../glfw/src -lglfw3