Triangle not rendering in GLFW3 - c

I recently downloaded GLFW3 since, from what I've heard, it's better than GLUT. I managed to get a window to display and change the clear color, but I cannot figure out why my draw calls aren't rendering anything. In this case, it's a triangle. I'm running this in Xcode 9.2 and this is the code I have right now:
#define GLFW_INCLUDE_GLCOREARB
#include <stdio.h>
#include <stdlib.h>
#include <GLFW/glfw3.h>
static const GLfloat vertex_buffer_data[] = {
-1.0f, -1.0f, 0.0f,
1.0f, -1.0f, 0.0f,
0.0f, 1.0f, 0.0f,
};
int main(int argc, const char * argv[]) {
GLuint VertexBufferID;
GLFWwindow* window;
/* Initialize the library */
if ( !glfwInit() )
{
return -1;
}
#ifdef __APPLE__
/* We need to explicitly ask for a 3.2 context on OS X */
glfwWindowHint (GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint (GLFW_CONTEXT_VERSION_MINOR, 2);
glfwWindowHint (GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
glfwWindowHint (GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
#endif
/* Create a windowed mode window and its OpenGL context */
window = glfwCreateWindow( 400 , 400, "Hello World", NULL, NULL );
if (!window)
{
glfwTerminate();
return -1;
}
/* Make the window's context current */
glfwMakeContextCurrent(window);
glGenBuffers(1, &VertexBufferID);
glBindBuffer(GL_ARRAY_BUFFER, VertexBufferID);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertex_buffer_data), vertex_buffer_data, GL_STATIC_DRAW);
//Edit in
GLuint program = initShaders(VSHADER_SOURCE, FSHADER_SOURCE);
while (!glfwWindowShouldClose(window))
{
/* Render here */
//set clear color
glClearColor(0.0, 0.0, 0.0, 1.0);
//clear window
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers
//Draw
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, VertexBufferID);
//got error 0x502 on line below
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (void*)0);
//Edit in
glUseProgram(program);
glDrawArrays(GL_TRIANGLES, 0, 3);
glDisableVertexAttribArray(0);
/* Swap front and back buffers */
glfwSwapBuffers(window);
/* Poll for and process events */
glfwPollEvents();
}
glfwTerminate();
return 0;
}
It's probably a minor mistake but I can't see it.
Edit: Okay, shaders are required here from what I'm told. I don't know how I got away without them in GLUT. I guess it was an older version. So here are the shader programs I'm using.
"#version 330 core\n"
"layout(location = 0) in vec3 vertexPosition_modelspace;\n"
"void main()\n"
"{\n"
" gl_Position.xyz = vertexPosition_modelspace;\n"
" gl_Position.w = 1.0;\n"
"}\n";
"#version 330 core\n"
"out vec3 color;\n"
"void main()\n"
"{\n"
" color = vec3(1, 0, 0);\n"
"}\n";
I should also mention that I've been following this tutorial for help as well. http://www.opengl-tutorial.org/beginners-tutorials/tutorial-2-the-first-triangle/
As for errors, I found that glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (void*)0); raises error code 0x502, which apparently means GL_INVALID_OPERATION; I don't know what that means in this case.

The first parameter of glBufferData is the target type of the buffer, not the named buffer object itself. glBufferData operates on the named buffer object which is currently bound to the specified target:
glBufferData(
GL_ARRAY_BUFFER, // GL_ARRAY_BUFFER instead of VertexBufferID
sizeof(vertex_buffer_data),
vertex_buffer_data,
GL_STATIC_DRAW);
If you want to use an OpenGL core profile context, then you have to use a shader program; this is not optional.
Further, you have to create a named Vertex Array Object, because the default vertex array object (0) is not present in a core profile context (a core-profile sketch follows the compatibility snippet below).
The modern way of rendering in OpenGL is to use a shader program.
If you don't want to use a shader program, then you have to use a compatibility profile context, define the array of vertex data the deprecated way by glVertexPointer, and enable the client-side capability for vertex coordinates by glEnableClientState( GL_VERTEX_ARRAY ):
glfwWindowHint (GLFW_OPENGL_PROFILE,
GLFW_OPENGL_COMPAT_PROFILE); // instead of GLFW_OPENGL_CORE_PROFILE
.....
glGenBuffers(1, &VertexBufferID);
glBindBuffer(GL_ARRAY_BUFFER, VertexBufferID);
glBufferData(GL_ARRAY_BUFFER,
sizeof(vertex_buffer_data), vertex_buffer_data, GL_STATIC_DRAW);
.....
glEnableClientState( GL_VERTEX_ARRAY );
glBindBuffer(GL_ARRAY_BUFFER, VertexBufferID);
glVertexPointer(3, GL_FLOAT, 0, (void*)0);
glDrawArrays(GL_TRIANGLES, 0, 3);
glDisableClientState( GL_VERTEX_ARRAY );
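If you do stay with the core profile instead, the missing pieces are a named vertex array object and a compiled shader program. Here is a minimal sketch (error checking omitted for brevity; VSHADER_SOURCE and FSHADER_SOURCE are assumed to hold the shader strings from the question, and real code should check GL_COMPILE_STATUS and GL_LINK_STATUS):
GLuint vao;
glGenVertexArrays(1, &vao);   /* a named VAO is mandatory in a core profile */
glBindVertexArray(vao);

GLuint vs = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vs, 1, &VSHADER_SOURCE, NULL);
glCompileShader(vs);

GLuint fs = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fs, 1, &FSHADER_SOURCE, NULL);
glCompileShader(fs);

GLuint program = glCreateProgram();
glAttachShader(program, vs);
glAttachShader(program, fs);
glLinkProgram(program);
glUseProgram(program);
With the named VAO bound, the glVertexAttribPointer call from the question no longer raises GL_INVALID_OPERATION.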

Related

GLSL Shader going black when I try to sample a texture

I render a triangle strip this way, and with basic pass-through shaders everything works fine:
EDIT:
I added TexCoords and modified the shaders, but I keep getting the same result: my 3D objects are going black!
UPDATED CODE:
// Draw all the prisms
glBegin(GL_TRIANGLE_STRIP);
for(i=0;i<num_elems;i++) {
for(j=0;j<num_vertices;j++) {
glNormal3fv((GLfloat *)(a+j*2));
glTexCoord2f(0.0f, 0.0f);
glVertex3fv((GLfloat *)(a+j*2+1));
glTexCoord2f(1.0f, 0.0f);
glNormal3fv((GLfloat *)(b+j*2));
glTexCoord2f(1.0f, 1.0f);
glVertex3fv((GLfloat *)(b+j*2+1));
}
glNormal3fv((GLfloat *)(a));
glTexCoord2f(0.0f, 1.0f);
glVertex3fv((GLfloat *)(a+1));
glNormal3fv((GLfloat *)(b));
glTexCoord2f(0.0f, 0.0f);
glVertex3fv((GLfloat *)(b+1));
a+=face_size;
b+=face_size;
}
glEnd();
And I am trying to attach a texture to my shaders, but I can't figure out how to pass the texture.
I create and add the texture to my program this way (the texture data is verified; it is an array declared as unsigned char data[imageSize]):
GLuint textureID;
glGenTextures(1, &textureID);
glBindTexture(GL_TEXTURE_2D, textureID);
glActiveTexture(GL_TEXTURE0); // Texture unit 0
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glTexImage2D(GL_TEXTURE_2D, 0,GL_BGR, width, height, 0, GL_BGR, GL_UNSIGNED_BYTE, data);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
GLuint VertexShaderID = glCreateShader(GL_VERTEX_SHADER);
This is what I read in other posts with the same issue, and I added it to my code after compiling my shaders and linking my program without errors.
Tutorials tend to gloss over this information (how you tell your shader the name and unit of your bound texture).
GLuint t1Location = glGetUniformLocation(programID, "tex1");
glUniform1i(t1Location, 0);
And here are my shaders (UPDATED CODE):
#define GLSL(version, shader) "#version " #version "\n" #shader
const char* vert = GLSL
(
110,
varying vec4 position;
varying vec3 normal;
varying out vec4 texCoord;
varying vec2 coord;
void main()
{
position = gl_ModelViewMatrix * gl_Vertex;
normal = normalize( gl_NormalMatrix * gl_Normal.xyz );
gl_Position = gl_ModelViewProjectionMatrix * gl_Vertex;
coord = vec2(gl_MultiTexCoord0);
}
);
const char* frag = GLSL
(
110,
uniform sampler2D tex1;
varying vec4 position;
varying vec3 normal;
varying vec2 coord;
void main()
{
gl_FragColor = texture2D(tex1, coord);
}
);
EDIT2:
I am setting up GL this way (maybe something is conflicting with my texture shader, but I don't think so!):
/* set up depth-buffering */
glEnable(GL_DEPTH_TEST);
glEnable(GL_POLYGON_SMOOTH);
glHint(GL_POLYGON_SMOOTH_HINT, GL_FASTEST);
/* set up lights */
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glPolygonMode(GL_FRONT_AND_BACK,GL_FILL);
glShadeModel(GL_SMOOTH);
GLfloat lightpos[] = { 3.0, 0.0, 1.0, 0.0 };
GLfloat lightcolor[] = { 0.5, 0.5, 0.5, 1.0 };
GLfloat ambcolor[] = { 0.5, 0.5, 0.5, 1.0 };
glLightModelfv(GL_LIGHT_MODEL_AMBIENT,ambcolor);
glEnable(GL_LIGHTING);
glColorMaterial(GL_FRONT_AND_BACK,GL_AMBIENT_AND_DIFFUSE);
glEnable (GL_COLOR_MATERIAL);
glLightfv (GL_LIGHT0,GL_POSITION,lightpos);
glLightfv (GL_LIGHT0,GL_AMBIENT,ambcolor);
glLightfv (GL_LIGHT0,GL_DIFFUSE,lightcolor);
glLightfv (GL_LIGHT0,GL_SPECULAR,lightcolor);
glLightf (GL_LIGHT0,GL_CONSTANT_ATTENUATION,0.2);
glLightf (GL_LIGHT0,GL_LINEAR_ATTENUATION,0.0);
glLightf (GL_LIGHT0,GL_QUADRATIC_ATTENUATION,1.0);
glEnable (GL_LIGHT0);
glEnable(GL_TEXTURE_2D);
Replacing gl_FragColor with a flat color works fine.
I know it may be related to the coord parameter, but I have tried everything I found and nothing is working for me.
The internal texture format GL_BGR is not valid. GL_BGR is valid for the format of the source texture data, but the internal representation has to be GL_RGB.
See glTexImage2D.
Adapt your code like this:
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0, GL_BGR, GL_UNSIGNED_BYTE, data);
See the Khronos reference page GLAPI/glTexImage2D which says:
To define texture images, call glTexImage2D. The arguments describe the parameters of the texture image, such as height, width, width of the border, level-of-detail number (see glTexParameter), and number of color components provided. The last three arguments describe how the image is represented in memory.
format determines the composition of each element in data. It can assume one of these symbolic values:
GL_BGR:
Each element is an RGB triple. The GL converts it to floating point and assembles it into an RGBA element by attaching 1 for alpha. Each component is clamped to the range [0,1].

OpenGL+GLFW glGenVertexArrays returns GL_INVALID_OPERATION

In trying to move into using "modern" OpenGL (basically 3.2+), I've run into some trouble running basic code (derived from both here and here) using GLFW, GLEW, and OpenGL.
My first problem is that with the below code:
#define GLEW_STATIC
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <stdlib.h>
#include <stdio.h>
const GLchar* vertexSource =
"#version 150 core\n"
"in vec2 position;"
"void main()"
"{"
" gl_Position = vec4(position, 0.0, 1.0);"
"}";
const GLchar* fragmentSource =
"#version 150 core\n"
"out vec4 outColor;"
"void main()"
"{"
" outColor = vec4(1.0, 1.0, 1.0, 1.0);"
"}";
void checkErr(const char* msg) {
GLenum err = glGetError();
if (err != 0) {
printf("# \"%s\": %d\n", msg, err);
exit(EXIT_FAILURE);
} else {
printf("# \"%s\": successful\n", msg);
}
}
int main(int argc, char* argv[]) {
GLFWwindow* window;
// Initialize GLFW
if (!glfwInit())
return -1;
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
// Create a windowed mode window and its OpenGL context
window = glfwCreateWindow(640, 480, "Hello World", NULL, NULL);
if (!window)
{
glfwTerminate();
return -1;
}
// Make the window's context current
glfwMakeContextCurrent(window);
// Initialize GLEW
glewExperimental = GL_TRUE;
glewInit();
// get version info
const GLubyte* renderer = glGetString(GL_RENDERER);
const GLubyte* version = glGetString(GL_VERSION);
const GLubyte* glslVersion = glGetString(GL_SHADING_LANGUAGE_VERSION);
printf ("Renderer: %s\n", renderer);
printf ("OpenGL version: %s\n", version);
printf ("GLSL version: %s\n", glslVersion);
// Create Vertex Array Object
GLuint vao;
glGenVertexArrays(1, &vao);
checkErr("Gen VAO");
glBindVertexArray(vao);
checkErr("Bind VAO");
// Create a Vertex Buffer Object and copy the vertex data to it
GLuint vbo;
glGenBuffers(1, &vbo);
checkErr("Gen VBO");
GLfloat vertices[] = {
0.0f, 0.5f,
0.5f, -0.5f,
-0.5f, -0.5f
};
glBindBuffer(GL_ARRAY_BUFFER, vbo);
checkErr("Bind VBO");
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
checkErr("VBO data");
// Create and compile the vertex shader
GLuint vertexShader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertexShader, 1, &vertexSource, NULL);
glCompileShader(vertexShader);
checkErr("Compile vert shader");
// Create and compile the fragment shader
GLuint fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragmentShader, 1, &fragmentSource, NULL);
glCompileShader(fragmentShader);
checkErr("Compile frag shader");
// Link the vertex and fragment shader into a shader program
GLuint shaderProgram = glCreateProgram();
glAttachShader(shaderProgram, vertexShader);
glAttachShader(shaderProgram, fragmentShader);
glBindFragDataLocation(shaderProgram, 0, "outColor");
glLinkProgram(shaderProgram);
checkErr("Link program");
glUseProgram(shaderProgram);
checkErr("Use program");
// Specify the layout of the vertex data
GLint posAttrib = glGetAttribLocation(shaderProgram, "position");
glEnableVertexAttribArray(posAttrib);
checkErr("Enable vertex attrib");
glVertexAttribPointer(posAttrib, 2, GL_FLOAT, GL_FALSE, 0, 0);
checkErr("Describe vert data");
// Loop until the user closes the window
while (!glfwWindowShouldClose(window))
{
/* Render here */
glClearColor(0.0, 0.0, 0.0, 1.0);
glClear(GL_COLOR_BUFFER_BIT);
glDrawArrays(GL_TRIANGLES, 0, 3);
/* Swap front and back buffers */
glfwSwapBuffers(window);
/* Poll for and process events */
glfwPollEvents();
}
glfwTerminate();
exit(EXIT_SUCCESS);
}
I'm immediately running into GL_INVALID_OPERATION errors on the very first step of setting up the vertex array object.
I've done a fair bit of research regarding OS X's flimsy OpenGL support, but so far most of the things I've modified in this code have done nothing more than produce a completely black screen (that is, once I removed the crashing behavior of the checkErr helper function).
For reference, I'm running on an early 2015 MacBook Pro w/Retina, OS X v10.11.3, and the output of the version info from my above program lists as follows:
Renderer: Intel(R) Iris(TM) Graphics 6100
OpenGL version: 4.1 INTEL-10.12.13
GLSL version: 4.10
Any help is greatly appreciated!
You just assumed that your error was generated by glGenVertexArrays. But that is not the case: it is generated by glewInit. This is because GLEW is broken on core profile OpenGL: it uses glGetString(GL_EXTENSIONS) to query the extension string, which is not available in core profiles and generates a GL_INVALID_ENUM error (1280).
Normally, glewInit will then abort with return code GL_FALSE. However, the "workaround" of setting glewExperimental=GL_TRUE makes it carry on, ignoring the error and querying all the extension pointers anyway. This is broken in at least 3 different regards:
All the GLEW variables for querying the availability of specific extensions return false even when the extension is available.
It will retrieve function pointers for extension functions whose availability hasn't been advertised by the implementation. It is not guaranteed that those pointers will be NULL, and calling them would be undefined behavior. Together with point 1, this means you have no way of checking the availability of any extension, except by manually doing the work GLEW is actually there to do for you.
It will leave the GL context in an error state.
As a quick & dirty hack, you can just add a glGetError() right after glewInit to read the error away. After I did that, your code produced the expected white triangle on my implementation (NVIDIA/Linux).
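In code, the hack is literally one extra line in the init sequence from the question:
glewExperimental = GL_TRUE;
glewInit();
glGetError(); /* read away the GL_INVALID_ENUM that glewInit left behind */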
A better fix is probably to switch over to another GL loader that properly works with core profiles, for example glad. Switching over will not be hard, as only that init function has to be replaced. Note that glad is not a loader library but a Python script that generates a loader source file for your needs, so you don't link another library; you just add another source file to your project.
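For reference, a sketch of what the init would look like with glad instead of GLEW (this assumes a glad v1 loader generated for your GL version; gladLoadGLLoader pairs naturally with glfwGetProcAddress):
#include <glad/glad.h>  /* generated file, replaces GL/glew.h */

glfwMakeContextCurrent(window);
if (!gladLoadGLLoader((GLADloadproc)glfwGetProcAddress)) {
    printf("Failed to initialize glad\n");
    return -1;
}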

Cannot show texture in OpenGL 4.3 using VertexArrayObject

I've been banging my head against this code for two days. It seems that there's no error of any sort creating buffers or textures, but the texture doesn't show.
Here is my code for the texture load:
struct image2d texImage = loadBMPImage(filePath);
glActiveTexture(GL_TEXTURE0);
glGenTextures(1, &(result.external->texID));
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glBindTexture(GL_TEXTURE_2D, result.external->texID);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB8, texImage.width, texImage.height, 0, GL_RGB, GL_UNSIGNED_BYTE, texImage.pixels);
free(texImage.pixels);
The image2d structure is this one
struct image2d{
unsigned int width, height;
unsigned char* pixels;
};
Yes, I'm enabling GL_TEXTURE_2D via glEnable()
Then my mesh is drawn with this code
void MeshDraw(Mesh m, int renderType)
{
glBindVertexArray(m.external->vao);
glBindBuffer(GL_ARRAY_BUFFER, m.external->vbo);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, VERTEX_SIZE*4, 0);
glEnableVertexAttribArray(1);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, VERTEX_SIZE*4, (void*)12);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m.external->ibo);
glDrawElements(GL_TRIANGLES, m.external->sizeFc * 3, GL_UNSIGNED_SHORT, 0);
glDisableVertexAttribArray(0);
glDisableVertexAttribArray(1);
return;
}
And finally here is my vertex shader
#version 430 core
layout (location = 0) in vec3 position;
layout (location = 1) in vec2 inTexCoord;
uniform mat4 transform;
out vec2 texCoord;
out vec3 outPos;
void main(void)
{
outPos = position;
gl_Position = transform * vec4(position, 1.0);
texCoord = inTexCoord;
}
And here is my fragment shader
#version 430 core
out vec4 drawColor;
in vec2 texCoord;
in vec3 outPos;
uniform sampler2D sampler;
void main(void)
{
drawColor = texture(sampler, texCoord);
//drawColor = vec4(clamp(outPos, 0.0, 1.0), 1.0);
}
If you need to look at the whole project I'm posting it here
I'll appreciate any kind of help :)
Additional code (which is also in the download if anyone wants to see it):
void initOpenGL()
{
printf("OpenGL version: %s\n",glGetString(GL_VERSION));
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glFrontFace(GL_CW);
glCullFace(GL_BACK);
glEnable(GL_CULL_FACE);
glEnable(GL_DEPTH_TEST);
glEnable(GL_TEXTURE_2D);
glEnable(GL_FRAMEBUFFER_SRGB);
return;
}
This is called right after making the context and initializing GLEW.
void RenderGame(Game g)
{
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
ShaderBind(g.external->sh);
setUniformmat4(g.external->sh, "transform", TransformGetProjectedTransformation(g.external->transf));
TextureBind(g.external->texture);
MeshDraw(g.external->msh, GL_TRIANGLES);
glFlush();
glfwSwapBuffers(WindowGetHandler(g.external->window));
return;
}
And this is my render method.
Your texture is not mipmap-complete, but you are still using the default GL_NEAREST_MIPMAP_LINEAR minification filter, so sampling the texture will fail.
You try to set it to GL_NEAREST, but this sequence of operations is wrong:
glGenTextures(1, &(result.external->texID));
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glBindTexture(GL_TEXTURE_2D, result.external->texID);
In the GL, the texture sampler state is part of the texture object itself (there are also separate sampler objects available nowadays which override that state, but you don't seem to use them either), and glTexParameteri() affects whatever texture object is bound at the time of the call. I don't know if some texture is bound at that time, or none at all; but certainly the new texture is not, so it will stick with the initial default of GL_NEAREST_MIPMAP_LINEAR...
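The straightforward fix is to bind the new texture object before setting its sampler state, so that the parameter calls actually affect it:
glGenTextures(1, &(result.external->texID));
glBindTexture(GL_TEXTURE_2D, result.external->texID);  /* bind first */
/* these now affect the texture just created */
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);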
SOLUTION: It seems that @peppe was right all along. Just to be meticulous, I set the sampler to 0 with the setUniform call and it worked. At first it still didn't behave as expected, but that was because the function that loads the bitmap file was wrong. Now it works like a charm :) Thank you guys!
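For completeness, setting the sampler to unit 0 with plain GL calls instead of the asker's setUniform helper would look roughly like this (programID stands in for the linked shader program, which isn't shown in the question; "sampler" matches the uniform name in the fragment shader above):
glUseProgram(programID);
GLint samplerLoc = glGetUniformLocation(programID, "sampler");
glUniform1i(samplerLoc, 0);  /* sample from texture unit GL_TEXTURE0 */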

Trouble with simple VBO example

I'm trying to get a super simple GLFW and VBO example running, but I'm stuck. I've used glBegin and glEnd for other projects, and I'm trying to update my code to eventually work with OpenGL ES 2, but for now I just want to be as forward compatible as possible.
From some examples online, I've come up with the following code:
#include <stdlib.h>
#include <GL/glfw3.h>
#include <stdio.h>
#define bool int
#define true 1
#define false 0
const int width = 800;
const int height = 600;
bool opened = true;
GLuint glTriangle;
GLfloat triangle[] = {
-1.0f, -1.0f, 0.0f,
1.0f, -1.0f, 0.0f,
0.0f, 1.0f, 0.0f,
};
void init() {
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
glGenBuffers(1, &glTriangle);
glBindBuffer(GL_ARRAY_BUFFER, glTriangle);
glBufferData(GL_ARRAY_BUFFER, sizeof(triangle), triangle, GL_STATIC_DRAW);
}
void display() {
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, glTriangle);
glVertexAttribPointer(0L, 3, GL_FLOAT, GL_FALSE, 0, 0);
glDrawArrays(GL_TRIANGLES, 0, 3);
glDisableVertexAttribArray(0);
}
int windowClose(GLFWwindow window) {
opened = false;
return GL_TRUE;
}
void main() {
if (!glfwInit()) {
printf("glfwInit didn't work\n");
return;
}
GLFWwindow window = glfwCreateWindow(width, height, GLFW_WINDOWED, "Test", 0);
glfwMakeContextCurrent(window);
init();
glfwSetWindowCloseCallback(window, windowClose);
glfwSwapInterval(1);
while(opened) {
display();
glfwSwapBuffers(window);
glfwPollEvents();
}
glfwTerminate();
}
This code runs, but all I get is a black screen when I expected to see a white triangle. I've tried to avoid anything fancy like shaders/textures until I at least get something painted to the screen.
If it matters, I'm compiling with: gcc -o test -lGL -lglfw -lm test.c on Linux with OpenGL version 2.1 through the Mesa driver.
What am I doing wrong? If I need to specify color information, what is a simple way to do that?
You shouldn't be using glEnableVertexAttribArray / glVertexAttribPointer with the fixed pipeline. It might work (I'm not positive; I think attrib 0 might alias to the vertex attrib array, but I'm not sure), but it's not really correct to do so.
If you don't have a shader, you should be using glEnableClientState and glVertexPointer for vertex data.
The default color is (1, 1, 1, 1), so I think you should be OK on colors for now.
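Put concretely, the display() function from the question could be rewritten for the fixed pipeline like this (a sketch; the glClear call is added so each frame starts from the clear color):
void display() {
    glClear(GL_COLOR_BUFFER_BIT);
    glEnableClientState(GL_VERTEX_ARRAY);
    glBindBuffer(GL_ARRAY_BUFFER, glTriangle);
    glVertexPointer(3, GL_FLOAT, 0, (void*)0);  /* offset 0 into the bound VBO */
    glDrawArrays(GL_TRIANGLES, 0, 3);
    glDisableClientState(GL_VERTEX_ARRAY);
    glBindBuffer(GL_ARRAY_BUFFER, 0);
}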

OpenGL draw buffer problems

I've begun doing a bit of OpenGL, and I've just discovered alternatives to the fixed function pipeline, so I went searching for tutorials. Sadly, most good tutorials use C++, and I would like to stick to C if possible, so I wasn't able to just copy-paste stuff and try it.
Copy-pasting and merging code gave errors though, and I'm not sure how I should deal with them. The error in question was a segmentation fault, and here is the code:
#include <GL/glfw.h>
#include <GL/glu.h>
#include <stdlib.h>
#define SCREEN_WIDTH 1600
#define SCREEN_HEIGHT 1000
void main(int argc, char **argv)
{
float mouse_x, mouse_y;
GLuint positionBufferObject;
const float vertexPositions[] = {
0.75f, 0.75f, 0.0f, 1.0f,
0.75f, -0.75f, 0.0f, 1.0f,
-0.75f, -0.75f, 0.0f, 1.0f,
};
glGenBuffers(1, &positionBufferObject);
glBindBuffer(GL_ARRAY_BUFFER, positionBufferObject);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertexPositions), vertexPositions, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
int running = GL_TRUE;
// Initialize GLFW
if( !glfwInit() )
{
exit( EXIT_FAILURE );
}
// Open an OpenGL window
if( !glfwOpenWindow(SCREEN_WIDTH,SCREEN_HEIGHT, 8,8,8,8,8,8, GLFW_WINDOW ) )
{
glfwTerminate();
exit( EXIT_FAILURE );
}
while (running)
{
glBindBuffer(GL_ARRAY_BUFFER, positionBufferObject);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 4, GL_FLOAT, GL_FALSE, 0, 0);
glDrawArrays(GL_TRIANGLES, 0, 3);
}
}
Of course, a lot of code is missing here, because it (apparently) didn't influence the error.
Commenting out "glDrawArrays" fixes the error, and OpenGL runs normally (without drawing anything, logically).
Any help?
You should initialise GL (that is, create the window and its context) before you start binding buffers and so on. The initialisation process fetches and assigns the pointers to these functions; until then they're null.
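Applied to the posted main(), that means opening the window (and with it the GL context) before any buffer calls, roughly:
/* sketch: create the context first, only then touch GL */
if (!glfwInit())
    exit(EXIT_FAILURE);
if (!glfwOpenWindow(SCREEN_WIDTH, SCREEN_HEIGHT, 8, 8, 8, 8, 8, 8, GLFW_WINDOW)) {
    glfwTerminate();
    exit(EXIT_FAILURE);
}
/* the context now exists, so creating and filling the buffer is safe */
glGenBuffers(1, &positionBufferObject);
glBindBuffer(GL_ARRAY_BUFFER, positionBufferObject);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertexPositions), vertexPositions, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);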
