OpenGL application crashing - c

This OpenGL program is written to draw a triangle, and it crashes.
Basically, a set of triangle vertices is defined as an array named coordinates; this array is then passed to a buffer, and glDrawArrays draws the triangle using the mode GL_TRIANGLES, starting from vertex 0, with 3 vertices in total.
Am I right? And could someone show me where the error is? Here is the code:
// Open an OpenGL window
GLFWwindow* window;
int k = 0;
/****Step 1: define vertices in (x, y, z) form****/
const GLfloat coordinates[] = {
    -1.0f, -1.0f, 0.0f,
    1.0f, -1.0f, 0.0f,
    0.0f, 1.0f, 0.0f
};
/************************/
/**Step 2: send this triangle vertices to OpenGL through a buffer**/
GLuint vertexBuffer; // identify vertex buffer
void Render(void) {
    /************************/
    glEnableVertexAttribArray(0);
    glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
    glVertexAttribPointer(0, 3 /*size*/, GL_FLOAT /*type*/, GL_FALSE, 0, (void*)0);
    glDrawArrays(GL_TRIANGLES, 0, 3);
    //glDisableVertexAttribArray(0);
    /************************/
    // glClearColor(0., 1., 1., 1.); // blue colour
    glClear(GL_COLOR_BUFFER_BIT);
    // Swap front and back rendering buffers
    glfwSwapBuffers(window);
    // Poll for and process events
    glfwPollEvents();
}
int main(void) {
    /*Initializing steps here*/
    // Create a windowed mode window and its OpenGL context
    window = glfwCreateWindow(700, 500, "Hello World", NULL, NULL);
    // Make the window's context current
    glfwMakeContextCurrent(window);
    /**Step 2: send this triangle vertices to OpenGL through a buffer**/
    glGenBuffers(1, &vertexBuffer); // generating 1 buffer, put resulting identifier in this buffer
    glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
    glBufferData(GL_ARRAY_BUFFER, sizeof(coordinates), coordinates, GL_STATIC_DRAW);
    /************************/
    // Main loop
    while (glfwWindowShouldClose(window) == 0) {
        // OpenGL rendering goes here...
        Render();
    }
    // Close window and terminate GLFW
    glfwDestroyWindow(window);
    glfwTerminate();
    // Exit program
    exit(EXIT_SUCCESS);
}
UPDATE
I found the error: I was initializing GLEW before creating the OpenGL context, and that is what caused the program to crash.
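For anyone hitting the same crash, here is a minimal sketch of the initialization order that works (GLEW resolves GL function pointers, so it must be initialized only after a context is current):
if (!glfwInit())
    exit(EXIT_FAILURE);
GLFWwindow* window = glfwCreateWindow(700, 500, "Hello World", NULL, NULL);
if (window == NULL)
    exit(EXIT_FAILURE);
glfwMakeContextCurrent(window); // the context is now current...
if (glewInit() != GLEW_OK)      // ...so GLEW can resolve function pointers
    exit(EXIT_FAILURE);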

OK, first of all, you should have provided more details, but I figured them out myself. Here they are:
Missing headers:
#include <stdlib.h>
#include <stdio.h>
#include <GL/glew.h>
#include <GLFW/glfw3.h>
Makefile:
CFLAGS = -Wall -O2
CFLAGS += $(shell pkg-config --cflags glfw3)
CFLAGS += $(shell pkg-config --cflags glew)
LDFLAGS += $(shell pkg-config --libs glfw3)
LDFLAGS += $(shell pkg-config --libs glew)
SOURCES = main.c
OUT = gl-test
CC = gcc
default: $(SOURCES)
	$(CC) $(CFLAGS) $(LDFLAGS) $(SOURCES) -o $(OUT)

clean:
	-rm -f $(OUT)

.PHONY: default clean
The error that occurred was a segmentation fault, in the following call:
glGenBuffers(1, &vertexBuffer);
But what I see is that glfwCreateWindow() returns NULL, so things already go wrong in the very first instruction. You can figure out why this error happened like this:
static void error_callback(int error, const char *description)
{
    puts(description);
}

int main(void)
{
    glfwSetErrorCallback(error_callback);
    ...
}
It is also worth modifying the window-creation code to look like this:
// Create a windowed mode window and its OpenGL context
window = glfwCreateWindow(700, 500, "Hello World", NULL, NULL);
if (window == NULL) {
    fprintf(stderr, "Error in glfwCreateWindow\n");
    return EXIT_FAILURE;
}
After this, I see the following output in my terminal:
The GLFW library is not initialized
Error in glfwCreateWindow
Please try to do the same and confirm that you have the same problem. If not, please provide the output from your terminal.
EDIT 1:
OK, it seems that first of all you need to do the following at the very beginning of main():
if (!glfwInit()) {
    fprintf(stderr, "Error on GLFW init\n");
    return EXIT_FAILURE;
}
That solves the previous error, but I still get a segmentation fault.
EDIT 2:
The second error is that GLEW wasn't initialized. GLEW initialization must happen after the window (and its OpenGL context) has been created, like this:
// Make the window's context current
glfwMakeContextCurrent(window);

GLenum glewinit_res = glewInit();
if (glewinit_res != GLEW_OK) {
    fprintf(stderr, "Glew init error: %s\n",
            glewGetErrorString(glewinit_res));
    return EXIT_FAILURE;
}
Now I don't get any runtime errors, and the window shows up, but it only has a black background with nothing on it. Anyway, I believe the original error is fixed.
EDIT 3:
If you want to see your triangle, just remove the glClear() call: in Render() it runs after glDrawArrays(), so it wipes out the triangle before the buffers are swapped.
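Alternatively, keep the call but move it to the top of Render(), so the clear happens before the draw instead of after it. Here is a sketch of the reordered function, reusing the globals from the question:
void Render(void) {
    glClearColor(0.0f, 1.0f, 1.0f, 1.0f); // the colour from the question's commented-out line
    glClear(GL_COLOR_BUFFER_BIT);         // clear BEFORE drawing

    glEnableVertexAttribArray(0);
    glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (void*)0);
    glDrawArrays(GL_TRIANGLES, 0, 3);
    glDisableVertexAttribArray(0);

    glfwSwapBuffers(window);
    glfwPollEvents();
}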
Here is the modified code that works for me (main.c file):
http://pastebin.com/sx5nMQHc

Related

OpenGL 3.1 with SDL2 context fails to output anything on screen

I am trying to get a simple demo of OpenGL working with SDL2. I am using MacOS Big Sur 11.3.1, my SDL version is 2.0.16, and my attempted OpenGL version is 3.1.
Everything seems to work fine at first, given that SDL is initialized successfully, the SDL window is not null, and the OpenGL context is not null either. But when trying to print out the vendor name, renderer name, and version name of OpenGL, all of the strings are null:
vendor = (null)
renderer = (null)
version = (null)
This also explains why nothing appears on the window. I have followed plenty of tutorials that go over using SDL2 with OpenGL, but none of them have worked for me.
I am compiling like this: clang -O3 -lSDL2 -lGL gl_sdl.c.
If anyone knows what is going on, please let me know; I am very confused. My code is below.
#include <SDL2/SDL.h>
#include <GL/gl.h>
enum {w = 800, h = 600};
#define FAIL(msg) {fprintf(stderr, "Could not " msg "\n"); return 1;}
// https://www.khronos.org/opengl/wiki/Tutorial1:_Creating_a_Cross_Platform_OpenGL_3.2_Context_in_SDL_(C_/_SDL)
// clang -O3 -lSDL2 -lGL gl_sdl.c && ./a.out
int main(void) {
    if (SDL_Init(SDL_INIT_VIDEO) < 0) FAIL("initialize SDL");

    SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_CORE);
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 1);

    // Turn on double buffering with a 24bit Z buffer. You may need to change this to 16 or 32 for your system
    SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
    SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 24);

    SDL_Window* const window = SDL_CreateWindow("gl_sdl", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, w, h, SDL_WINDOW_OPENGL);
    if (window == NULL) FAIL("create a window");

    SDL_GLContext context = SDL_GL_CreateContext(window);
    if (context == NULL) FAIL("create a context");

    // This makes our buffer swap synchronized with the monitor's vertical refresh
    SDL_GL_SetSwapInterval(1);

    printf("vendor = %s\nrenderer = %s\nversion = %s\n", glGetString(GL_VENDOR), glGetString(GL_RENDERER), glGetString(GL_VERSION));

    SDL_Event event;
    while (1) {
        while (SDL_PollEvent(&event)) {
            if (event.type == SDL_QUIT) {
                SDL_GL_DeleteContext(context);
                SDL_DestroyWindow(window);
                SDL_Quit();
                return 0;
            }
        }

        // Blue
        glClearColor(0.0f, 0.0f, 1.0f, 1.0f);
        glClear(GL_COLOR_BUFFER_BIT);
        SDL_GL_SwapWindow(window);
        SDL_Delay(20);
    }
}
macOS uses a different linker flag for OpenGL than Linux and Windows do. Instead of -lGL, use -framework OpenGL. E.g.
clang -O3 gl_sdl.c -lSDL2 -framework OpenGL
On a side note, the program requests a context for OpenGL 3.1 but does not use an OpenGL loading library (or load any functions manually). As is, this happens to cause no problems, but only because no functions beyond OpenGL 1.1 are used.
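For illustration only, here is a sketch of how one post-1.1 function could be loaded manually through SDL's own loader if the program ever grew beyond GL 1.1 (the typedef and helper names are hypothetical, not part of SDL):
#include <stdio.h>
#include <SDL2/SDL.h>
#include <GL/gl.h>

// Hypothetical helper: resolve glGenBuffers (a post-1.1 entry point) at runtime.
// Must be called after SDL_GL_CreateContext() has succeeded.
typedef void (*GenBuffersFn)(GLsizei, GLuint*);

static GenBuffersFn load_glGenBuffers(void) {
    GenBuffersFn fn = (GenBuffersFn) SDL_GL_GetProcAddress("glGenBuffers");
    if (fn == NULL)
        fprintf(stderr, "glGenBuffers is unavailable\n");
    return fn;
}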

OpenGL+GLFW glGenVertexArrays returns GL_INVALID_OPERATION

In trying to move to "modern" OpenGL (basically 3.2+), I've run into some trouble running basic code (derived from both here and here) using GLFW, GLEW, and OpenGL.
My first problem is that with the below code:
#define GLEW_STATIC
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <stdlib.h>
#include <stdio.h>
const GLchar* vertexSource =
    "#version 150 core\n"
    "in vec2 position;"
    "void main()"
    "{"
    "    gl_Position = vec4(position, 0.0, 1.0);"
    "}";
const GLchar* fragmentSource =
    "#version 150 core\n"
    "out vec4 outColor;"
    "void main()"
    "{"
    "    outColor = vec4(1.0, 1.0, 1.0, 1.0);"
    "}";

void checkErr(const char* msg) {
    GLenum err = glGetError();
    if (err != 0) {
        printf("# \"%s\": %d\n", msg, err);
        exit(EXIT_FAILURE);
    } else {
        printf("# \"%s\": successful\n", msg);
    }
}
int main(int argc, char* argv[]) {
    GLFWwindow* window;

    // Initialize GLFW
    if (!glfwInit())
        return -1;

    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
    glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);

    // Create a windowed mode window and its OpenGL context
    window = glfwCreateWindow(640, 480, "Hello World", NULL, NULL);
    if (!window)
    {
        glfwTerminate();
        return -1;
    }

    // Make the window's context current
    glfwMakeContextCurrent(window);

    // Initialize GLEW
    glewExperimental = GL_TRUE;
    glewInit();

    // get version info
    const GLubyte* renderer = glGetString(GL_RENDERER);
    const GLubyte* version = glGetString(GL_VERSION);
    const GLubyte* glslVersion = glGetString(GL_SHADING_LANGUAGE_VERSION);
    printf("Renderer: %s\n", renderer);
    printf("OpenGL version: %s\n", version);
    printf("GLSL version: %s\n", glslVersion);

    // Create Vertex Array Object
    GLuint vao;
    glGenVertexArrays(1, &vao);
    checkErr("Gen VAO");
    glBindVertexArray(vao);
    checkErr("Bind VAO");

    // Create a Vertex Buffer Object and copy the vertex data to it
    GLuint vbo;
    glGenBuffers(1, &vbo);
    checkErr("Gen VBO");

    GLfloat vertices[] = {
        0.0f, 0.5f,
        0.5f, -0.5f,
        -0.5f, -0.5f
    };

    glBindBuffer(GL_ARRAY_BUFFER, vbo);
    checkErr("Bind VBO");
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
    checkErr("VBO data");

    // Create and compile the vertex shader
    GLuint vertexShader = glCreateShader(GL_VERTEX_SHADER);
    glShaderSource(vertexShader, 1, &vertexSource, NULL);
    glCompileShader(vertexShader);
    checkErr("Compile vert shader");

    // Create and compile the fragment shader
    GLuint fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
    glShaderSource(fragmentShader, 1, &fragmentSource, NULL);
    glCompileShader(fragmentShader);
    checkErr("Compile frag shader");

    // Link the vertex and fragment shader into a shader program
    GLuint shaderProgram = glCreateProgram();
    glAttachShader(shaderProgram, vertexShader);
    glAttachShader(shaderProgram, fragmentShader);
    glBindFragDataLocation(shaderProgram, 0, "outColor");
    glLinkProgram(shaderProgram);
    checkErr("Link program");
    glUseProgram(shaderProgram);
    checkErr("Use program");

    // Specify the layout of the vertex data
    GLint posAttrib = glGetAttribLocation(shaderProgram, "position");
    glEnableVertexAttribArray(posAttrib);
    checkErr("Enable vertex attrib");
    glVertexAttribPointer(posAttrib, 2, GL_FLOAT, GL_FALSE, 0, 0);
    checkErr("Describe vert data");

    // Loop until the user closes the window
    while (!glfwWindowShouldClose(window))
    {
        /* Render here */
        glClearColor(0.0, 0.0, 0.0, 1.0);
        glClear(GL_COLOR_BUFFER_BIT);
        glDrawArrays(GL_TRIANGLES, 0, 3);

        /* Swap front and back buffers */
        glfwSwapBuffers(window);

        /* Poll for and process events */
        glfwPollEvents();
    }

    glfwTerminate();
    exit(EXIT_SUCCESS);
}
I'm immediately running into GL_INVALID_OPERATION errors on the very first step of setting up the vertex array object.
I've done a fair bit of research regarding OS X's flimsy OpenGL support, but so far most of the things I've modified in this code have done nothing more than produce a completely black screen (that is, once I removed the crashing behavior of the checkErr helper function).
For reference, I'm running on an early 2015 MacBook Pro w/Retina, OS X v10.11.3, and the output of the version info from my above program lists as follows:
Renderer: Intel(R) Iris(TM) Graphics 6100
OpenGL version: 4.1 INTEL-10.12.13
GLSL version: 4.10
Any help is greatly appreciated!
You just assumed that your error was generated by glGenVertexArrays. But that is not the case. It is generated by glewInit. And this is because GLEW is just broken on core profile OpenGL: it uses glGetString(GL_EXTENSIONS) to query the extension string, which is not available in core profiles and generates a GL_INVALID_ENUM error (1280).
Normally, glewInit will then abort with return code GL_FALSE. However, the "workaround" of setting glewExperimental=GL_TRUE makes it carry on, ignoring the error and querying all the extension pointers anyway. This is broken in at least 3 different regards:
1. All the GLEW variables for querying the availability of specific extensions return false even when the extension is available.
2. It will retrieve function pointers for extension functions whose availability hasn't been advertised by the implementation. It is not guaranteed that those pointers will be NULL, and calling them would be undefined behavior. Together with point 1, this means you have no way of checking the availability of any extension, except by manually doing the work GLEW is actually there to do for you.
3. It will leave the GL context in an error state.
As a quick & dirty hack, you can just add a glGetError() right after glewInit to consume the error. After I did that, your code produced the expected white triangle on my implementation (NVIDIA/Linux).
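In code, the hack is just one extra line (a sketch; the added glGetError() merely discards the error glewInit left behind):
glewExperimental = GL_TRUE;
glewInit();
glGetError(); // swallow the GL_INVALID_ENUM that glewInit generated on the core profile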
A better fix is probably to switch over to another GL loader that properly works with core profiles, for example glad. Switching over should not be hard, as only that init function has to be replaced. Note that glad is not a loader library but a Python script that generates a loader source file for your needs, so you don't need to link another library; you just add another source file to your project.
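For comparison, a sketch of what the init would look like with a glad-generated loader (assuming glad v1 output, with <glad/glad.h> included before <GLFW/glfw3.h>):
glfwMakeContextCurrent(window);
if (!gladLoadGLLoader((GLADloadproc) glfwGetProcAddress)) {
    fprintf(stderr, "Failed to initialize glad\n");
    return -1;
}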

Ubuntu 15.04 Mesa and SDL implementation

Been a while since I've needed to ask a question,
I'm trying out SDL2 and OpenGL (3.3, which is the compatibility limit with Mesa) because GLSL really interests me. However, on my work machine I learned very quickly that it isn't easy to get things to work. Every tutorial I've used, and even the Mesa demos themselves, use headers that don't come with Ubuntu's base GL library. I've already given in and installed GLEW, but it doesn't feel right to keep adding libraries just to make things work. The GL headers I have are:
glcorearb.h, glew.h, glext.h, gl.h, gl_mangle.h, glu.h, glu_mangle.h,
glxew.h, glxext.h, glx.h, glxint.h, glx_mangle.h, glxmd.h, glxproto.h,
glxtokens.h, wglew.h
I tried following LazyFoo's tutorials but did not get the white quad to appear. I also followed opengl-tutorial's tutorials and did not get the white triangle to appear (the tutorial says not to worry if you don't see it at first, but doesn't explain what to do when it never shows up). I tried to follow the rest of the tutorial, but I'm writing this in C and not C++, so I'm worried about straying too far from the tutorial and confusing the issue further. I've installed SDL2 and made sure I had everything. This is the code in my current SDL2/GL program; it does not reveal a white triangle at all. It's a combination of tutorials, but I've read all the SDL API material I could to make sure nothing on the SDL side affects what GL tries to do.
#define SDL_ASSERT_LEVEL 3
#include <stdio.h>
#include <stdlib.h>
#include <stdbool.h>
#include <SDL2/SDL.h>
#include <SDL2/SDL_error.h>
#include <SDL2/SDL_assert.h>
#include <SDL2/SDL_version.h>
#include <SDL2/SDL_events.h>
#include <GL/gl.h>
int main() {
    SDL_version compiledWith, linkedWith;
    SDL_VERSION(&compiledWith);
    SDL_GetVersion(&linkedWith);

    if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO) < 0) {
        fprintf(stderr, "\nUnable to initialize SDL: %s\n", SDL_GetError());
        exit(1);
    }

    SDL_Log("\nCompiled with: %d.%d.%d\n", compiledWith.major,
            compiledWith.minor, compiledWith.patch);
    SDL_Log("\nLinked with: %d.%d.%d\n", linkedWith.major,
            linkedWith.minor, linkedWith.patch);

    SDL_Window* window = SDL_CreateWindow("SDL2/OpenGL Demo", 0, 0, 640, 480,
                                          SDL_WINDOW_OPENGL | SDL_WINDOW_RESIZABLE);

    // Can now make GL calls after the below line
    SDL_GLContext glContext = SDL_GL_CreateContext(window);

    GLuint vertexArrayID;
    glGenVertexArrays(1, &vertexArrayID);
    glBindVertexArray(vertexArrayID);

    static const GLfloat gVertexBufferData[] = {
        -1.0f, -1.0f, 0.0f,
        1.0f, -1.0f, 0.0f,
        0.0f, 1.0f, 0.0f,
    };

    GLuint vertexBuffer;
    glGenBuffers(1, &vertexBuffer);
    glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
    glBufferData(GL_ARRAY_BUFFER, sizeof(gVertexBufferData),
                 gVertexBufferData, GL_STATIC_DRAW);

    glEnableVertexAttribArray(0);
    glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
    glVertexAttribPointer(
        0,
        3,
        GL_FLOAT,
        GL_FALSE,
        0,
        (void*)0
    );
    glDrawArrays(GL_TRIANGLES, 0, 3);
    glDisableVertexAttribArray(0);

    glClearColor(0, 0, 0, 1);
    glClear(GL_COLOR_BUFFER_BIT);
    SDL_GL_SwapWindow(window);

    SDL_Event theEvent;
    bool running = true;
    while (running) {
        while (SDL_PollEvent(&theEvent)) {
            switch (theEvent.type) {
                case SDL_QUIT:
                    SDL_Log("\nQuit request acknowledged\n");
                    // Finish up GL usage
                    SDL_GL_DeleteContext(glContext);
                    // Finish up SDL usage
                    SDL_Quit();
                    running = false;
                    break;
                default:
                    break;
            }
        }
    }
    return 0;
}
I use gcc main.c -lSDL2 -lGL -o test for linking. I suspect I might be missing some libraries, but I'm not sure where to check; the compiler doesn't warn me about anything it can't find unless I follow a tutorial that uses something I don't have.
To conclude, since this is a longer post than anticipated, the questions are:
1. Am I missing any important library to actually get this to work on my system (Ubuntu 15.04, Intel Haswell Mobile x86/MMX/SSE2)?
2. Did I miss something in my code that is necessary to see the white triangle?

OpenGL 3.3 + OSX + GLFW Does not work [duplicate]

This question already has answers here:
How do I create an OpenGL 3.3 context in GLFW 3
(1 answer)
Simple triangle using OpenGL and GLFW [duplicate]
(1 answer)
Closed 8 years ago.
I'm following the http://opengl-tutorials.org tutorial series and came up with very basic code for drawing a white triangle on screen, but I can't seem to get it to work.
I've compiled GLFW without any additional options using appleshell's answer here: OpenGL 3.3/4.1 on Mac OSX 10.9 using GLFW library
I created a simple source file based on the site's instructions:
#include <stdio.h>
#include <stdlib.h>
#include <glfw3.h>
// Need gl3.h for glGenVertexArrays() and glBindVertexArray()
#include <OpenGL/gl3.h>
// An array of 3 vectors which represents 3 vertices
static const GLfloat g_vertex_buffer_data[] = {
    -1.0f, -1.0f, 0.0f,
    1.0f, -1.0f, 0.0f,
    0.0f, 1.0f, 0.0f,
};
#define printGlErr(func) err = glGetError(); if (err) fprintf(stderr, func " error: %u at line %d\n", err, __LINE__);
GLuint err;
int main(int argc, const char * argv[]) {
    if (!glfwInit()) {
        fprintf(stderr, "Failed to initialize GLFW\n");
        return -1;
    }

    glfwWindowHint(GLFW_SAMPLES, 4);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
    glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE); // To make MacOS happy; should not be needed
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE); // We don't want the old OpenGL

    // Open a window and create its OpenGL context
    GLFWwindow* window;
    window = glfwCreateWindow(1024, 768, "Triangle", NULL, NULL);
    if (window == NULL) {
        fprintf(stderr, "Failed to open GLFW window.\n");
        glfwTerminate();
        return -1;
    }
    glfwMakeContextCurrent(window);

    // Ensure we can capture the escape key being pressed below
    glfwSetInputMode(window, GLFW_STICKY_KEYS, GL_TRUE);

    // Init stuff
    GLuint VertexArrayID;
    glGenVertexArrays(1, &VertexArrayID);
    glBindVertexArray(VertexArrayID);

    // This will identify our vertex buffer
    GLuint vertexbuffer;
    // Generate 1 buffer, put the resulting identifier in vertexbuffer
    glGenBuffers(1, &vertexbuffer);
    printGlErr("glGenBuffers()");
    // The following commands will talk about our 'vertexbuffer' buffer
    glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
    printGlErr("glBindBuffer()");
    // Give our vertices to OpenGL.
    glBufferData(GL_ARRAY_BUFFER, sizeof(g_vertex_buffer_data), g_vertex_buffer_data, GL_STATIC_DRAW);
    printGlErr("glBufferData()");

    do {
        // Draw stuff
        glEnableVertexAttribArray(0);
        glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
        glVertexAttribPointer(
            0,        // attribute 0. No particular reason for 0, but must match the layout in the shader.
            3,        // size
            GL_FLOAT, // type
            GL_FALSE, // normalized?
            0,        // stride
            (void*)0  // array buffer offset
        );
        printGlErr("glVertexAttribPointer()");
        glDrawArrays(GL_TRIANGLES, 0, 3);
        printGlErr("glDrawArrays()");
        glDisableVertexAttribArray(0);
        printGlErr("glDisableVertexAttribArray()");

        // Swap buffers
        glfwSwapBuffers(window);
        glfwPollEvents();
    } while (glfwGetKey(window, GLFW_KEY_ESCAPE) != GLFW_PRESS && glfwWindowShouldClose(window) == 0); // Check if the ESC key was pressed or the window was closed

    printf("Goodbye!\n");
    return(0);
}
I can't seem to get the code to work. I'm getting a GL_INVALID_OPERATION on the glDrawArrays() function call.
The code renders a black window on Mavericks and on Yosemite. These are my hardware specs: http://support.apple.com/kb/SP653
It does not crash, and it exits on ESC, but nothing else happens (other than spamming the error code on the console). The results are the same using either of the two available video boards.
Also, I'm compiling the source code using the following line on terminal:
gcc main.c -o triangle -framework OpenGl -framework Cocoa -framework IOKit -framework CoreVideo -I../glfw/include/GLFW -L../glfw/src -lglfw3

Trouble with simple VBO example

I'm trying to get a super simple GLFW and VBO example running, but I'm stuck. I've used glBegin and glEnd for other projects, and now I'm trying to update my code to work with OpenGL ES 2; for now I just want to be as forward compatible as possible.
From some examples online, I've come up with the following code:
#include <stdlib.h>
#include <GL/glfw3.h>
#include <stdio.h>
#define bool int
#define true 1
#define false 0
const int width = 800;
const int height = 600;
bool opened = true;
GLuint glTriangle;
GLfloat triangle[] = {
    -1.0f, -1.0f, 0.0f,
    1.0f, -1.0f, 0.0f,
    0.0f, 1.0f, 0.0f,
};

void init() {
    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
    glGenBuffers(1, &glTriangle);
    glBindBuffer(GL_ARRAY_BUFFER, glTriangle);
    glBufferData(GL_ARRAY_BUFFER, sizeof(triangle), triangle, GL_STATIC_DRAW);
}

void display() {
    glEnableVertexAttribArray(0);
    glBindBuffer(GL_ARRAY_BUFFER, glTriangle);
    glVertexAttribPointer(0L, 3, GL_FLOAT, GL_FALSE, 0, 0);
    glDrawArrays(GL_TRIANGLES, 0, 3);
    glDisableVertexAttribArray(0);
}

int windowClose(GLFWwindow window) {
    opened = false;
    return GL_TRUE;
}

void main() {
    if (!glfwInit()) {
        printf("glfwInit didn't work\n");
        return;
    }

    GLFWwindow window = glfwCreateWindow(width, height, GLFW_WINDOWED, "Test", 0);
    glfwMakeContextCurrent(window);
    init();
    glfwSetWindowCloseCallback(window, windowClose);
    glfwSwapInterval(1);

    while (opened) {
        display();
        glfwSwapBuffers(window);
        glfwPollEvents();
    }
    glfwTerminate();
}
This code runs, but all I get is a black screen when I expected to see a white triangle. I've tried to avoid anything fancy like shaders/textures until I at least get something painted to the screen.
If it matters, I'm compiling with: gcc -o test -lGL -lglfw -lm test.c on Linux with OpenGL version 2.1 through the Mesa driver.
What am I doing wrong? If I need to specify color information, what is a simple way to do that?
You shouldn't be using glEnableVertexAttribArray / glVertexAttribPointer with the fixed pipeline. It might work (I'm not positive; I think attrib 0 might alias to the vertex attrib array, but I'm not sure), but it's not really correct to do so.
If you don't have a shader, you should be using glEnableClientState and glVertexPointer for vertex data.
The default color is (1,1,1,1), so I think you should be ok on colors for now.
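A sketch of what display() could look like in that style, reusing the glTriangle buffer from the question's init():
void display() {
    glClear(GL_COLOR_BUFFER_BIT); // clear each frame before drawing

    glEnableClientState(GL_VERTEX_ARRAY);
    glBindBuffer(GL_ARRAY_BUFFER, glTriangle);
    glVertexPointer(3, GL_FLOAT, 0, 0); // 3 floats per vertex, tightly packed
    glDrawArrays(GL_TRIANGLES, 0, 3);
    glDisableClientState(GL_VERTEX_ARRAY);
}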
