note: glGetString(GL_VERSION) returned: 2.1.0 - Build 8.15.10.2361
I have installed GLEW to System32, and VCDir/lib and VCDir/include directories. So the linker should not have any issues finding the necessary bits of GLEW. The trouble I am having though is that the following code:
/* Minimal reproduction from the question: generate one buffer-object name.
 * Uint32 is SDL's 32-bit unsigned type; glGenBuffers expects a GLuint* —
 * assumed layout-compatible here (both 32-bit unsigned) — TODO confirm. */
void foo()
{
Uint32 vboId;
glGenBuffers(1, &vboId);
}
Gives me the following error:
unresolved external symbol __imp____glewGenBuffers
This error was the entire reason I decided to install GLEW. See: unresolved external symbol _glGenBuffers error
In addition to this error, there are a few warnings that are a bit concerning:
Warning 1 warning C4028: formal parameter 3 different from declaration c:\sdl-1.2.14\include\sdl_opengl.h 4855 1 Prototek
Warning 2 warning C4028: formal parameter 3 different from declaration c:\sdl-1.2.14\include\sdl_opengl.h 4857 1 Prototek
Warning 3 warning C4028: formal parameter 2 different from declaration c:\sdl-1.2.14\include\sdl_opengl.h 4859 1 Prototek
Warning 4 warning C4028: formal parameter 2 different from declaration c:\sdl-1.2.14\include\sdl_opengl.h 4861 1 Prototek
Warning 5 warning C4028: formal parameter 3 different from declaration c:\sdl-1.2.14\include\sdl_opengl.h 4868 1 Prototek
Warning 6 warning C4028: formal parameter 3 different from declaration c:\sdl-1.2.14\include\sdl_opengl.h 4869 1 Prototek
Also, is it a good idea to simply install SDL to my VC inc and lib and System32 directories as I have done for GLEW?
My #include's at the top of my file look like this:
#include <stdio.h>
#include <glew.h>
#include "sdl.h"
#include "sdl_opengl.h"
#include <gl/GLU.h>
#include <gl/GL.h>
But in case it is needed, here is the entire body of code:
#include <stdio.h>
#include <glew.h>
#include "sdl.h"
#include "sdl_opengl.h"
#include <gl/GLU.h>
#include <gl/GL.h>
/*
 * Load a BMP file with SDL and upload it as a GL_TEXTURE_2D.
 *
 * fileName: path to an uncompressed BMP image.
 * Returns the generated texture name, or 0 on load failure
 * (glGenTextures never produces 0, so 0 is a safe sentinel).
 *
 * NOTE(review): GL_BGR as the source format assumes a 24-bit BMP laid
 * out B,G,R — confirm against the assets actually shipped.
 */
Uint32 loadTexture(char* fileName)
{
    Uint32 id;
    SDL_Surface *img = NULL;

    /* Load the image into system memory using SDL. */
    img = SDL_LoadBMP(fileName);
    if (img == NULL) {
        /* BUG FIX: the original dereferenced img->w / img->h without
         * checking for failure; a missing file crashed here. */
        fprintf(stderr, "loadTexture: could not load '%s': %s\n",
                fileName, SDL_GetError());
        return 0;
    }

    /* Generate an id for this texture and make it current. */
    glGenTextures(1, &id);
    glBindTexture(GL_TEXTURE_2D, id);

    /* Copy the pixel data into video memory via OpenGL. */
    glTexImage2D(
        GL_TEXTURE_2D,
        0,
        GL_RGB,
        img->w,
        img->h,
        0,
        GL_BGR,
        GL_UNSIGNED_BYTE,
        img->pixels
    );

    /* No mipmaps are generated, so plain linear filtering. */
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

    /* GL owns its own copy now; the surface can be released. */
    SDL_FreeSurface(img);
    return id;
}
Uint32 tex; /* texture name filled in by init() via loadTexture(); read by display() */
/* One-time GL state setup: black clear color, a 0..1 orthographic
 * projection with Y pointing down, texturing enabled, and the fireball
 * texture loaded.  Must run after the GL context exists. */
void init()
{
glClearColor(0.0, 0.0, 0.0, 1.0);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
/* glOrtho(left, right, bottom, top, near, far): top = 0 places the
 * origin at the upper-left corner of the window. */
glOrtho(0.0, 1.0, 1.0, 0.0, -1.0, 1.0);
glMatrixMode(GL_MODELVIEW);
glEnable(GL_TEXTURE_2D);
tex = loadTexture("fireball.bmp");
}
/* Generate a single buffer-object (VBO) name via GLEW's glGenBuffers. */
void foo()
{
    Uint32 bufferId;

    glGenBuffers(1, &bufferId);
}
/* Render one frame: a textured quad from (0.25, 0.25) to (0.5, 0.5) in
 * the 0..1 ortho space, drawn with immediate mode.  Texture coordinates
 * sample the 0.5..1.0 quarter of the texture. */
void display()
{
glClear(GL_COLOR_BUFFER_BIT);
glLoadIdentity();
glBindTexture(GL_TEXTURE_2D, tex);
glBegin(GL_QUADS);
glTexCoord2f(0.5, 0.5);
glVertex3f(0.25, 0.25, 0.0);
glTexCoord2f(1.0, 0.5);
glVertex3f(0.5, 0.25, 0.0);
glTexCoord2f(1.0, 1.0);
glVertex3f(0.5, 0.5, 0.0);
glTexCoord2f(0.5, 1.0);
glVertex3f(0.25, 0.5, 0.0);
glEnd();
}
/*
 * Entry point: open a 640x480 GL window via SDL 1.2, run init(), and
 * render at a capped 30 FPS until an SDL_QUIT event arrives.
 *
 * FIXES: the original ignored the return values of SDL_Init and
 * SDL_SetVideoMode; a failed video mode would hand a NULL screen to the
 * GL calls in init()/display().
 */
int main()
{
    Uint32 isRunning = 1;
    SDL_Surface *screen = NULL;
    SDL_Event event;
    Uint32 start;
    Uint32 FPS = 30;

    if (SDL_Init(SDL_INIT_EVERYTHING) < 0) {
        fprintf(stderr, "SDL_Init failed: %s\n", SDL_GetError());
        return 1;
    }
    screen = SDL_SetVideoMode(640, 480, 32, SDL_OPENGL);
    if (screen == NULL) {
        fprintf(stderr, "SDL_SetVideoMode failed: %s\n", SDL_GetError());
        SDL_Quit();
        return 1;
    }

    init();

    while (isRunning) {
        start = SDL_GetTicks();
        while (SDL_PollEvent(&event)) {
            switch (event.type) {
            case SDL_QUIT:
                isRunning = 0;
                break;
            default:
                break;
            }
        }
        display();
        SDL_GL_SwapBuffers();
        /* Sleep away the remainder of the frame to cap at FPS. */
        if (1000 / FPS > SDL_GetTicks() - start) {
            SDL_Delay(1000 / FPS - (SDL_GetTicks() - start));
        }
    }
    SDL_Quit();
    return (0);
}
I noticed you didn't include a call to glewInit(), if you are planning to use glew make sure you call it before using any of the openGL function pointers it provides.
Did you actually link to the GLEW library? Simply telling VS where the directory is isn't enough; you have to tell it what library to link to.
A quick way to test if you're linking to the library is to delete it and recompile. If VC doesn't complain about not being able to find a particular library, then you didn't link to it.
Also, is it a good idea to simply install SDL to my VC inc and lib and System32 directories as I have done for GLEW?
It wasn't a good idea to install GLEW there, let alone anything else. Visual Studio is not like a Linux install; there isn't a global repository of libraries and headers. It is up to you to point your VS project to the particular libraries and header files (you can even set these globally if you want). Leave the VS directories alone.
glGenBuffers' second parameter is a pointer to a GLuint (GLuint *), not a pointer to a pointer to a GLuint (GLuint **, which is what you're passing). Instead, do this:
Uint32 vboId;
glGenBuffers(1, &vboId);
Related
I recently downloaded GLFW3 since it's better than GLUT from what I heard. I managed to get a window to display and change the clear colors but I cannot figure out why I'm not rendering anything in my draw calls. In this case, it's a triangle. I'm running this on XCode 9.2 and this is the code I have right now:
#define GLFW_INCLUDE_GLCOREARB
#include <stdio.h>
#include <stdlib.h>
#include <GLFW/glfw3.h>
/* Three (x, y, z) vertices of a triangle spanning clip space:
 * bottom-left, bottom-right, top-center (counter-clockwise). */
static const GLfloat vertex_buffer_data[] = {
-1.0f, -1.0f, 0.0f,
1.0f, -1.0f, 0.0f,
0.0f, 1.0f, 0.0f,
};
int main(int argc, const char * argv[]) {
GLuint VertexBufferID;
GLFWwindow* window;
/* Initialize the library */
if ( !glfwInit() )
{
return -1;
}
#ifdef __APPLE__
/* We need to explicitly ask for a 3.2 context on OS X */
glfwWindowHint (GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint (GLFW_CONTEXT_VERSION_MINOR, 2);
glfwWindowHint (GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
glfwWindowHint (GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
#endif
/* Create a windowed mode window and its OpenGL context */
window = glfwCreateWindow( 400 , 400, "Hello World", NULL, NULL );
if (!window)
{
glfwTerminate();
return -1;
}
/* Make the window's context current */
glfwMakeContextCurrent(window);
glGenBuffers(1, &VertexBufferID);
glBindBuffer(GL_ARRAY_BUFFER, VertexBufferID);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertex_buffer_data), vertex_buffer_data, GL_STATIC_DRAW);
//Edit in
**program = initShaders(VSHADER_SOURCE, FSHADER_SOURCE);**
while (!glfwWindowShouldClose(window))
{
/* Render here */
//set clear color
glClearColor(0.0, 0.0, 0.0, 1.0);
//clear window
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers
//Draw
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, VertexBufferID);
//got error 0x502 on line below
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (void*)0);
//Edit in
**glUseProgram(program);**
glDrawArrays(GL_TRIANGLES, 0, 3);
glDisableVertexAttribArray(0);
/* Swap front and back buffers */
glfwSwapBuffers(window);
/* Poll for and process events */
glfwPollEvents();
}
glfwTerminate();
return 0;
}
It's probably a minor mistake but I can't see it.
Edit: Okay shaders are required here from what I'm told. I don't know how I got away with it in GLUT. I guess it was an older version. So here are the shader programs I'm using.
"#version 330 core\n"
"layout(location = 0) in vec3 vertexPosition_modelspace;\n"
"void main()\n"
"{\n"
" gl_Position.xyz = vertexPosition_modelspace;\n"
" gl_Position.w = 1.0;\n"
"}\n";
"#version 330 core\n"
"out vec3 color;\n"
"void main()\n"
"{\n"
" color = vec3(1, 0, 0);\n"
"}\n";
I should also mention that I've been following this tutorial for help as well. http://www.opengl-tutorial.org/beginners-tutorials/tutorial-2-the-first-triangle/
As for errors, I found an error in glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (void*)0); as code 502, where it apparently means GL_INVALID_OPERATION, which I don't know what that means in this case.
The 2nd parameter of glBufferData is the target type of the buffer and not the named buffer object itself. glBufferData uses the named buffer object which is bound to the specified target:
glBufferData(
GL_ARRAY_BUFFER, // GL_ARRAY_BUFFER instead of VertexBufferID
sizeof(vertex_buffer_data),
vertex_buffer_data,
GL_STATIC_DRAW);
If you want to use an OpenGL Core profile context, then you have to use a shader program; this is not optional.
Further you have to create a named Vertex Array Object, because the default vertex array object (0) is not present in core profile context.
The modern way of rendering in OpenGL, would be to use a Shader program.
If you don't want to use a shader program, then you have to use a compatibility context: define the array of vertex data the deprecated way with glVertexPointer, and enable the client-side capability for vertex coordinates with glEnableClientState( GL_VERTEX_ARRAY ).
glfwWindowHint (GLFW_OPENGL_PROFILE,
GLFW_OPENGL_COMPAT_PROFILE); // instead of GLFW_OPENGL_CORE_PROFILE
.....
glGenBuffers(1, &VertexBufferID);
glBindBuffer(GL_ARRAY_BUFFER, VertexBufferID);
glBufferData(GL_ARRAY_BUFFER,
sizeof(vertex_buffer_data), vertex_buffer_data, GL_STATIC_DRAW);
.....
glEnableClientState( GL_VERTEX_ARRAY );
glBindBuffer(GL_ARRAY_BUFFER, VertexBufferID);
glVertexPointer(3, GL_FLOAT, 0, (void*)0);
glDrawArrays(GL_TRIANGLES, 0, 3);
glDisableClientState( GL_VERTEX_ARRAY );
In trying to move into using "modern" OpenGL (basically 3.2+), I've run into some troubles running basic code (derived from both here and here) using GLFW, GLEW, and OpenGL.
My first problem is that with the below code:
#define GLEW_STATIC
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <stdlib.h>
#include <stdio.h>
/* GLSL 1.50 (OpenGL 3.2 core) vertex shader: forwards the 2D position
 * attribute as the clip-space position (z = 0, w = 1). */
const GLchar* vertexSource =
"#version 150 core\n"
"in vec2 position;"
"void main()"
"{"
" gl_Position = vec4(position, 0.0, 1.0);"
"}";
/* Fragment shader: unconditionally outputs opaque white. */
const GLchar* fragmentSource =
"#version 150 core\n"
"out vec4 outColor;"
"void main()"
"{"
" outColor = vec4(1.0, 1.0, 1.0, 1.0);"
"}";
/* Poll the GL error state for the step described by msg: report success,
 * or print the error code and abort the program. */
void checkErr(const char* msg) {
    GLenum status = glGetError();

    if (status == 0) {
        printf("# \"%s\": successful\n", msg);
        return;
    }
    printf("# \"%s\": %d\n", msg, status);
    exit(EXIT_FAILURE);
}
int main(int argc, char* argv[]) {
GLFWwindow* window;
// Initialize GLFW
if (!glfwInit())
return -1;
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
// Create a windowed mode window and its OpenGL context
window = glfwCreateWindow(640, 480, "Hello World", NULL, NULL);
if (!window)
{
glfwTerminate();
return -1;
}
// Make the window's context current
glfwMakeContextCurrent(window);
// Initialize GLEW
glewExperimental = GL_TRUE;
glewInit();
// get version info
const GLubyte* renderer = glGetString(GL_RENDERER);
const GLubyte* version = glGetString(GL_VERSION);
const GLubyte* glslVersion = glGetString(GL_SHADING_LANGUAGE_VERSION);
printf ("Renderer: %s\n", renderer);
printf ("OpenGL version: %s\n", version);
printf ("GLSL version: %s\n", glslVersion);
// Create Vertex Array Object
GLuint vao;
glGenVertexArrays(1, &vao);
checkErr("Gen VAO");
glBindVertexArray(vao);
checkErr("Bind VAO");
// Create a Vertex Buffer Object and copy the vertex data to it
GLuint vbo;
glGenBuffers(1, &vbo);
checkErr("Gen VBO");
GLfloat vertices[] = {
0.0f, 0.5f,
0.5f, -0.5f,
-0.5f, -0.5f
};
glBindBuffer(GL_ARRAY_BUFFER, vbo);
checkErr("Bind VBO");
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
checkErr("VBO data");
// Create and compile the vertex shader
GLuint vertexShader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertexShader, 1, &vertexSource, NULL);
glCompileShader(vertexShader);
checkErr("Compile vert shader");
// Create and compile the fragment shader
GLuint fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragmentShader, 1, &fragmentSource, NULL);
glCompileShader(fragmentShader);
checkErr("Compile frag shader");
// Link the vertex and fragment shader into a shader program
GLuint shaderProgram = glCreateProgram();
glAttachShader(shaderProgram, vertexShader);
glAttachShader(shaderProgram, fragmentShader);
glBindFragDataLocation(shaderProgram, 0, "outColor");
glLinkProgram(shaderProgram);
checkErr("Link program");
glUseProgram(shaderProgram);
checkErr("Use program");
// Specify the layout of the vertex data
GLint posAttrib = glGetAttribLocation(shaderProgram, "position");
glEnableVertexAttribArray(posAttrib);
checkErr("Enable vertex attrib");
glVertexAttribPointer(posAttrib, 2, GL_FLOAT, GL_FALSE, 0, 0);
checkErr("Describe vert data");
// Loop until the user closes the window
while (!glfwWindowShouldClose(window))
{
/* Render here */
glClearColor(0.0, 0.0, 0.0, 1.0);
glClear(GL_COLOR_BUFFER_BIT);
glDrawArrays(GL_TRIANGLES, 0, 3);
/* Swap front and back buffers */
glfwSwapBuffers(window);
/* Poll for and process events */
glfwPollEvents();
}
glfwTerminate();
exit(EXIT_SUCCESS);
}
I'm immediately running into GL_INVALID_OPERATION errors on the very first step of setting up the vertex array object.
I've done a fair bit of research regarding OS X's flimsy OpenGL support, but so far most of the things I've modified in this code has done nothing more but produce a completely black screen (that is, when I removed the crashing behavior of the checkError helper function).
For reference, I'm running on an early 2015 MacBook Pro w/Retina, OS X v10.11.3, and the output of the version info from my above program lists as follows:
Renderer: Intel(R) Iris(TM) Graphics 6100
OpenGL version: 4.1 INTEL-10.12.13
GLSL version: 4.10
Any help is greatly appreciated!
You just assumed that your error was generated by glGenVertexArrays. But that is not the case. It is generated by glewInit. And this is because GLEW is just broken on core profile OpenGL: it uses glGetString(GL_EXTENSIONS) to query the extension string, which is not available in core profiles and generates a GL_INVALID_ENUM error (1280).
Normally, glewInit will then abort with return code GL_FALSE. However, the "workaround" of setting glewExperimental=GL_TRUE will make it going on, ignoring the error, and querying all the extension pointers anyway. This is now broken in at least 3 different regards:
All the GLEW variables for querying the availability of specific extensions return false even when the extension is available.
It will retrieve function pointers for extension functions whose availability hasn't been advertised by the implementation. It is not guaranteed that those pointers will be NULL, and calling them would be undefined behavior. Together with 1, this means you have no way of checking the availability of any extension, except by manually doing the work GLEW is actually there to do for you.
It will leave the GL context in an error state.
As a quick & dirty hack, you can just add a glGetError() right after glewInit, to read the error away. After I did that, your code produced the expected white triangle on my implementation (NVIDIA/Linux).
A better fix is probably to switch over to another GL loader which does properly work with core profiles, for example glad. Switching over will not be really hard, as only that init function has to be replaced. Note that glad is not a loader library, but a python script which generates a loader source file for your needs, so you don't need to link another library, but just have to add another source file to your project.
Been a while since I've needed to ask a question,
I'm trying out SDL2 and OpenGL (3.3 which is the compatibility limit with mesa) because the GLSL really interests me, however on my work machine I learned very quickly that it isn't easy to get things to work. Every tutorial I've used and even the Mesa demos themselves use headers that don't come with Ubuntu's base GL library, I've already resigned and installed GLEW but it doesn't feel right continously adding libraries to make things work, the GL headers I have are:
glcorearb.h, glew.h, glext.h, gl.h, gl_mangle.h, glu.h, glu_mangle.h,
glxew.h, glxext.h, glx.h, glxint.h, glx_mangle.h, glxmd.h, glxproto.h,
glxtokens.h, wglew.h
I tried following LazyFoo's tutorials but did not have the same result of getting a white quad to appear. I followed opengl-tutorial's tutorials and did not get the same result of a white triangle to appear (it mentions to not worry if you don't see it at first, but doesn't explain what to do in the case that it doesn't (I tried to follow the rest of the tutorial, but I'm writing this in C and not C++ so I'm worried about straying too far from the tutorials' and confusing the issue further. I've installed SDL2 and made sure I had everything. This is the code I have in my current SDL2/GL program, it does not at all reveal a white triangle, it's a combination of tutorials, but I've read all the SDL API material I could to make sure nothing on the SDL side affects what GL tries to do.
#define SDL_ASSERT_LEVEL 3
#include <stdio.h>
#include <stdlib.h>
#include <stdbool.h>
#include <SDL2/SDL.h>
#include <SDL2/SDL_error.h>
#include <SDL2/SDL_assert.h>
#include <SDL2/SDL_version.h>
#include <SDL2/SDL_events.h>
#include <GL/gl.h>
int main(){
SDL_version compiledWith, linkedWith;
SDL_VERSION(&compiledWith);
SDL_GetVersion(&linkedWith);
if(SDL_Init(SDL_INIT_VIDEO|SDL_INIT_AUDIO) < 0){
fprintf(stderr, "\nUnable to initialize SDL: %s\n", SDL_GetError());
exit(1);
}
SDL_Log("\nCompiled with: %d.%d.%d\n", compiledWith.major,
compiledWith.minor, compiledWith.patch);
SDL_Log("\nLinked with: %d.%d.%d\n", linkedWith.major,
linkedWith.minor, linkedWith.patch);
SDL_Window* window = SDL_CreateWindow("SDL2/OpenGL Demo", 0, 0, 640, 480,
SDL_WINDOW_OPENGL|SDL_WINDOW_RESIZABLE);
//Can now make GL calls after the below line
SDL_GLContext glContext = SDL_GL_CreateContext(window);
GLuint vertexArrayID;
glGenVertexArrays(1, &vertexArrayID);
glBindVertexArray(vertexArrayID);
static const GLfloat gVertexBufferData[] = {
-1.0f, -1.0f, 0.0f,
1.0f, -1.0f, 0.0f,
0.0f, 1.0f, 0.0f,
};
GLuint vertexBuffer;
glGenBuffers(1, &vertexBuffer);
glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(gVertexBufferData),
gVertexBufferData, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
glVertexAttribPointer(
0,
3,
GL_FLOAT,
GL_FALSE,
0,
(void*)0
);
glDrawArrays(GL_TRIANGLES, 0, 3);
glDisableVertexAttribArray(0);
glClearColor(0,0,0,1);
glClear(GL_COLOR_BUFFER_BIT);
SDL_GL_SwapWindow(window);
SDL_Event theEvent;
bool running = true;
while(running){
while(SDL_PollEvent(&theEvent)){
switch(theEvent.type){
case SDL_QUIT:
SDL_Log("\nQuit request acknowledged\n");
//Finish up GL usage
SDL_GL_DeleteContext(glContext);
//Finish up SDL usage
SDL_Quit();
running = false;
break;
default:
break;
}
}
}
return 0;
}
I use gcc main.c -lSDL2 -lGL -o test for linking, I suspect I might be missing linkage libraries but I'm not sure where I can check if I am or not, the compiler doesn't warn me about anything it can't find unless I follow a tutorial that uses something I don't have.
To conclude since this is a longer post than anticipated, the question is:
Am I missing any important library to actually get this to work on my system (Ubuntu 15.04 Intel Haswell Mobile x86/MMX/SSE2)?
Did I miss something in my code that is necessary to see the white triangle?
This question already has answers here:
How do I create an OpenGL 3.3 context in GLFW 3
(1 answer)
Simple triangle using OpenGL and GLFW [duplicate]
(1 answer)
Closed 8 years ago.
I'm following the http://opengl-tutorials.org tutorial series, and came up with a very basic code for drawing a white triangle on screen, but I can't seem to get it to work.
I've compiled glfw without any additional options using appleshell's answer here: OpenGL 3.3/4.1 on Mac OSX 10.9 using GLFW library
I created a simple source code based on the site instructions:
#include <stdio.h>
#include <stdlib.h>
#include <glfw3.h>
// Need gl3.h for glGenVertexArrays() and glBindVertexArray()
#include <OpenGL/gl3.h>
// An array of 3 vectors which represents 3 vertices
static const GLfloat g_vertex_buffer_data[] = {
-1.0f, -1.0f, 0.0f,
1.0f, -1.0f, 0.0f,
0.0f, 1.0f, 0.0f,
};
#define printGlErr(func) err = glGetError(); if (err) fprintf(stderr, func " error: %u at line %d\n", err, __LINE__);
GLuint err;
int main(int argc, const char * argv[]) {
if (!glfwInit()) {
fprintf( stderr, "Failed to initialize GLFW\n" );
return -1;
}
glfwWindowHint(GLFW_SAMPLES, 4);
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE); // To make MacOS happy; should not be needed
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE); //We don't want the old OpenGL
// Open a window and create its OpenGL context
GLFWwindow* window;
window = glfwCreateWindow(1024, 768, "Triangle", NULL, NULL);
if (window == NULL) {
fprintf(stderr, "Failed to open GLFW window.\n");
glfwTerminate();
return -1;
}
glfwMakeContextCurrent(window);
// Ensure we can capture the escape key being pressed below
glfwSetInputMode(window, GLFW_STICKY_KEYS, GL_TRUE);
// Init stuff
GLuint VertexArrayID;
glGenVertexArrays(1, &VertexArrayID);
glBindVertexArray(VertexArrayID);
// This will identify our vertex buffer
GLuint vertexbuffer;
// Generate 1 buffer, put the resulting identifier in vertexbuffer
glGenBuffers(1, &vertexbuffer);
printGlErr("glGenBuffers()");
// The following commands will talk about our 'vertexbuffer' buffer
glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
printGlErr("glBindBuffer()");
// Give our vertices to OpenGL.
glBufferData(GL_ARRAY_BUFFER, sizeof(g_vertex_buffer_data), g_vertex_buffer_data, GL_STATIC_DRAW);
printGlErr("glBufferData()");
do {
// Draw stuff
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
glVertexAttribPointer(
0, // attribute 0. No particular reason for 0, but must match the layout in the shader.
3, // size
GL_FLOAT, // type
GL_FALSE, // normalized?
0, // stride
(void*)0 // array buffer offset
);
printGlErr("glVertexAttribPointer()");
glDrawArrays(GL_TRIANGLES, 0, 3);
printGlErr("glDrawArrays()");
glDisableVertexAttribArray(0);
printGlErr("glDisableVertexAttribArray()");
// Swap buffers
glfwSwapBuffers(window);
glfwPollEvents();
} while(glfwGetKey(window, GLFW_KEY_ESCAPE) != GLFW_PRESS && glfwWindowShouldClose(window) == 0); // Check if the ESC key was pressed or the window was closed
printf("Goodbye!\n");
return(0);
}
I can't seem to get the code to work. I'm getting a GL_INVALID_OPERATION on the glDrawArrays() function call.
The code renders a black window on Mavericks and on Yosemite. This is my hardware specs: http://support.apple.com/kb/SP653
It does not crash, it exits on ESC, but nothing else happens (other than spamming the error code on the console). The results are the same using any of the 2 available video boards.
Also, I'm compiling the source code using the following line on terminal:
gcc main.c -o triangle -framework OpenGl -framework Cocoa -framework IOKit -framework CoreVideo -I../glfw/include/GLFW -L../glfw/src -lglfw3
I'm trying to get a super simple GLFW and VBO example running, but I'm stuck. I've used glBegin and glEnd for other projects, but I'm trying to update my code to work with OpenGL ES 2, but for now I just want to be as forward compatible as possible.
From some examples online, I've come up with the following code:
#include <stdlib.h>
#include <GL/glfw3.h>
#include <stdio.h>
/* Use the standard C99 boolean type instead of hand-rolled macros.
 * <stdbool.h> provides bool/true/false with the same truth values, so
 * all existing uses (`opened`, `running` conditions) behave identically. */
#include <stdbool.h>
/* Window dimensions in pixels. */
const int width = 800;
const int height = 600;
/* Main-loop flag; cleared by the window-close callback. */
bool opened = true;
/* Buffer-object name holding the triangle's vertex data. */
GLuint glTriangle;
/* Three (x, y, z) vertices spanning the viewport. */
GLfloat triangle[] = {
-1.0f, -1.0f, 0.0f,
1.0f, -1.0f, 0.0f,
0.0f, 1.0f, 0.0f,
};
/* One-time setup: set the clear color and upload the triangle's
 * vertices into a GL_ARRAY_BUFFER for later drawing. */
void init() {
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
glGenBuffers(1, &glTriangle);
glBindBuffer(GL_ARRAY_BUFFER, glTriangle);
glBufferData(GL_ARRAY_BUFFER, sizeof(triangle), triangle, GL_STATIC_DRAW);
}
void display() {
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, glTriangle);
glVertexAttribPointer(0L, 3, GL_FLOAT, GL_FALSE, 0, 0);
glDrawArrays(GL_TRIANGLES, 0, 3);
glDisableVertexAttribArray(0);
}
/* GLFW window-close callback: stops the render loop and permits the
 * window to close. */
int windowClose(GLFWwindow win) {
    opened = false; /* main loop watches this flag */
    return GL_TRUE; /* allow GLFW to proceed with closing */
}
/*
 * Entry point: create the window/context, upload the triangle, then
 * render with vsync until the close callback clears `opened`.
 *
 * FIXES: the original never checked glfwCreateWindow for failure before
 * handing the window to glfwMakeContextCurrent, and used nonstandard
 * `void main()` — the C standard requires main to return int.
 */
int main(void) {
    if (!glfwInit()) {
        printf("glfwInit didn't work\n");
        return 1;
    }

    GLFWwindow window = glfwCreateWindow(width, height, GLFW_WINDOWED, "Test", 0);
    if (!window) {
        printf("glfwCreateWindow didn't work\n");
        glfwTerminate();
        return 1;
    }
    glfwMakeContextCurrent(window);
    init();
    glfwSetWindowCloseCallback(window, windowClose);
    glfwSwapInterval(1); /* vsync */

    while (opened) {
        display();
        glfwSwapBuffers(window);
        glfwPollEvents();
    }
    glfwTerminate();
    return 0;
}
This code runs, but all I get is a black screen when I expected to see a white triangle. I've tried to avoid anything fancy like shaders/textures until I at least get something painted to the screen.
If it matters, I'm compiling with: gcc -o test -lGL -lglfw -lm test.c on Linux with OpenGL version 2.1 through the Mesa driver.
What am I doing wrong? If I need to specify color information, what is a simple way to do that?
You shouldn't be using glEnableVertexAttribArray / glVertexAttribPointer with the fixed pipeline. It might work (I'm not positive, I think attrib 0 might alias to the vertex attrib array, but not sure). It's not really correct to do so.
If you don't have a shader you should be using glEnableClientState and glVertexPointer for vertex data.
The default color is (1,1,1,1), so I think you should be ok on colors for now.