GLSL Uniform float not correctly passing (C)

I'm trying to create a shader for a screen-filling quad, but I can't seem to pass a uniform float to the shader.
In the following example I'm initializing GLUT, creating, compiling and linking the shaders, passing a uniform int and a uniform float to the shader, and reading them back to check. The int asdf works fine; the float qwer behaves weirdly.
If I set the value of qwer to 1.3, the uniform ends up as -2.0; if I set it to 1.2, it ends up as 2.0.
#include <stdio.h>
#include <string.h>
#include <GL/glut.h>
#include <unistd.h>
int gw = 640, gh = 360;
void drawScene(){
//creating a screen filling quad
glBegin(GL_QUADS);
glTexCoord2f(0.0, 0.0f); glVertex2i(-1, 1);
glTexCoord2f(1.0, 0.0f); glVertex2i(1, 1);
glTexCoord2f(1.0, 1.0f); glVertex2i(1, -1);
glTexCoord2f(0.0, 1.0f); glVertex2i(-1, -1);
glEnd();
glutSwapBuffers();
}
void update(int value){
glutPostRedisplay();
glutTimerFunc(1000 / 30, update, 0);
}
int main(int argc, char** argv){
//shader source code
char *fraShdrStr = "\n\
uniform int asdf;\
uniform float qwer;\
void main(){\n\
vec2 p = gl_TexCoord[0].xy;\n\
gl_FragColor=vec4(p.x,qwer,float(asdf),1.0);\n\
}";
char *verShdrStr = "\n\
void main(){\n\
gl_Position=gl_ModelViewProjectionMatrix*gl_Vertex;\n\
gl_TexCoord[0]=gl_MultiTexCoord0;\n\
}";
size_t verShdrLen, fraShdrLen;
char errorBuffer[1024];
int errorLength;
int program, verShdr, fraShdr;
verShdrLen = strlen(verShdrStr);
fraShdrLen = strlen(fraShdrStr);
//initializing glut
glutInit(&argc, argv);
glutInitWindowSize(gw, gh);
glutCreateWindow("");
//creating, compiling and linking shaders
verShdr = glCreateShader(GL_VERTEX_SHADER);
fraShdr = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(verShdr, 1, &verShdrStr, &verShdrLen);
glShaderSource(fraShdr, 1, &fraShdrStr, &fraShdrLen);
glCompileShader(verShdr);
glGetShaderInfoLog(verShdr, 1024, &errorLength, errorBuffer);
if(errorLength) printf("Vertex Shader Error:\n%s\n", errorBuffer);
glCompileShader(fraShdr);
glGetShaderInfoLog(fraShdr, 1024, &errorLength, errorBuffer);
if(errorLength) printf("Fragmen Shader Error:\n%s\n", errorBuffer);
program = glCreateProgram();
glAttachShader(program, verShdr);
glAttachShader(program, fraShdr);
glLinkProgram(program);
glGetProgramInfoLog(program, 1024, &errorLength, errorBuffer);
if(errorLength) printf("Linking Error:\n%s\n", errorBuffer);
glUseProgram(program);
//initializing variables to pass as uniform
int asdf = 9;
int asdf2;
float qwer = 1.0;
float qwer2;
//setting the uniform values
glUniform1i(glGetUniformLocation(program, "asdf"), asdf);
glGetUniformiv(program, glGetUniformLocation(program, "asdf"), &asdf2);
printf("%d\n", asdf2);
glUniform1f(glGetUniformLocation(program, "qwer"), qwer);
glGetUniformfv(program, glGetUniformLocation(program, "qwer"), &qwer2);
printf("%f\n", qwer2);
glutDisplayFunc(drawScene);
glutTimerFunc(1000/30, update, 0);
glutMainLoop();
}

You are misunderstanding the whole picture. Since OpenGL 3.0, using glBegin/glEnd to draw things is deprecated. Instead, you should use an approach based on so-called vertex arrays. Check out this question for an example code snippet.
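For illustration, here is a minimal sketch (my own, not from the linked question) of how the quad in drawScene could be drawn with client-side vertex arrays instead of glBegin/glEnd; the interleaved layout and the array name are assumptions:
// Interleaved data for the screen-filling quad: x, y, s, t per vertex
static const GLfloat quadVerts[] = {
    -1.0f,  1.0f, 0.0f, 0.0f,
     1.0f,  1.0f, 1.0f, 0.0f,
     1.0f, -1.0f, 1.0f, 1.0f,
    -1.0f, -1.0f, 0.0f, 1.0f,
};
void drawScene(void){
    glEnableClientState(GL_VERTEX_ARRAY);
    glEnableClientState(GL_TEXTURE_COORD_ARRAY);
    // stride is 4 floats per vertex: 2 position components, 2 texture coordinates
    glVertexPointer(2, GL_FLOAT, 4 * sizeof(GLfloat), quadVerts);
    glTexCoordPointer(2, GL_FLOAT, 4 * sizeof(GLfloat), quadVerts + 2);
    glDrawArrays(GL_QUADS, 0, 4);
    glDisableClientState(GL_TEXTURE_COORD_ARRAY);
    glDisableClientState(GL_VERTEX_ARRAY);
    glutSwapBuffers();
}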

Related

Can't pass color to FragmentShader

I'm trying to pass the color of my triangle through my main function, but when I try to do it my triangle just turns white, as if it had no fragment shader.
Vertex Shader:
#version 330 core
layout (location = 0) in vec3 position;
layout (location = 1) in vec3 aColor;
out vec3 color;
void main()
{
gl_Position = position
color = aColor;
}
Fragment Shader:
#version 330 core
in vec3 color;
out vec4 FragColor;
void main()
{
FragColor = vec4(color, 1.0);
};
main code:
float positions[] = {
-0.5f, -0.5f, 0.f, 0.8f, 0.3f, 0.02f, //bottom left
0.5, -0.5f, 0.f, 0.8f, 0.3f, 0.2f, //bottom right
0.0f, (sqrt(3.f) - 1.f) / 2.f, 0.f, 1.f, 0.6f, 0.32f, //top
-0.5f / 2.f, (0.37f - 0.5f) / 2.f, 0.f, 0.9f, 0.45f, 0.17f, //middle left
0.5f / 2, (0.37f - 0.5f) / 2.f, 0.f, 0.9f, 0.45f, 0.17f, //middle right
0.f, -0.5f, 0.f, 0.8f, 0.3f, 0.02f}; //middle
unsigned int indices[9] =
{
0, 5, 3,
5, 1, 4,
3, 4, 2};
unsigned int buffer, attribute, EBO;
//Create the shader
struct Shader shaderProgram = beginShader("res/shaders/sv1.shader", "res/shaders/sf1.shader");
//Create the Vertex Array Object and bind it
struct VAO vao1 = beginVAO();
vao1.Bind(vao1);
//Create the Vertex Buffer, linking it to the vertices
struct VBO vbo1 = beginVBO(positions, sizeof(positions));
//Create the Element Buffer and link it with the indices into the positions array
struct EBO ebo1 = beginEBO(indices, sizeof(indices));
//Connect the VBO to the VAO
vao1.LinkAttrib(vbo1, 0, 3, GL_FLOAT, 6 * sizeof(float), (void *)0);
vao1.LinkAttrib(vbo1, 1, 3, GL_FLOAT, 6 * sizeof(float), (void *)(3 * sizeof(float)));
//Unbind all the objects that were bound
vao1.Unbind(vao1);
vbo1.Unbind(vbo1);
ebo1.Unbind(ebo1);
while (true)
{
if (SDL_PollEvent(&windowEvent))
{
if (SDL_QUIT == windowEvent.type)
{
break;
}
}
glClearColor(0.05f, 0.57f, 0.38f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
shaderProgram.Activate(shaderProgram);
vao1.Bind(vao1);
glDrawElements(GL_TRIANGLES, 9, GL_UNSIGNED_INT, 0);
glEnd();
SDL_GL_SwapWindow(window); //update the window
}
vao1.Delete(vao1);
vbo1.Delete(vbo1);
ebo1.Delete(ebo1);
shaderProgram.Delete(shaderProgram);
SDL_DestroyWindow(window);
SDL_Quit();
LinkAttrib function:
void LinkAttrib(struct VBO VBO, GLuint layout, GLuint numComponents, GLenum type,
GLsizeiptr stride, void *offset)
{
VBO.Bind(VBO);
glVertexAttribPointer(layout, numComponents, type, GL_FALSE, stride, offset);
glEnableVertexAttribArray(layout);
VBO.Unbind(VBO);
}
As I'm writing this in C, the functions I create have to take the struct itself as a parameter, so I have the begin* functions attributed to each type, trying to simulate an object. The beginShader function gets the file with the shaders, compiles them and generates the shader program; beginVAO generates the vertex array object.
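(As an illustration of that pattern, a hypothetical sketch of such a struct, not necessarily the exact definitions used in this project, might look like this:)
// Hypothetical VAO "object": a GL name plus function pointers, so that
// calls read like vao1.Bind(vao1) in plain C. struct VBO is defined elsewhere.
struct VAO {
    GLuint ID;
    void (*Bind)(struct VAO);
    void (*Unbind)(struct VAO);
    void (*Delete)(struct VAO);
    void (*LinkAttrib)(struct VBO, GLuint, GLuint, GLenum, GLsizeiptr, void *);
};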
This is the drawing I get:
The vertex shader doesn't compile, for 2 reasons:
A ; is missing after gl_Position = position.
gl_Position and position have different types (vec4 versus vec3).
So change
gl_Position = position
to
gl_Position = vec4(position, 1.0);
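Putting both fixes together, the corrected vertex shader reads:
#version 330 core
layout (location = 0) in vec3 position;
layout (location = 1) in vec3 aColor;
out vec3 color;
void main()
{
    // position is a vec3, so it has to be widened to a vec4, and the statement needs a ';'
    gl_Position = vec4(position, 1.0);
    color = aColor;
}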

How to fix freetype incorrectly loading characters

The FreeType library opens and loads a font without throwing errors, but when I try to use the character textures they're repeated and upside down.
I'm creating a graphics-based program using OpenGL, to deal with fonts and text I'm using FreeType. When I load the texture and set it up, the correct sizes and glyph attributes (width, advance etc...) are provided, but when I use the bitmap to create a texture and use that texture it's incorrect (as described earlier).
here is the initialisation code:
FT_Init_FreeType(&ft);
FT_New_Face(ft, "Roboto.ttf", 0, &face);
FT_Set_Pixel_Sizes(face, 0, 100);
getChars();
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
FT_Done_Face(face);
FT_Done_FreeType(ft);
here is the code that then gets the character textures:
void getChars(){
int index = 0;
for (GLubyte c = 0; c < 128; c++){
if (FT_Load_Char(face, c, FT_LOAD_RENDER)){
printf("couldn't load character\n");
continue;
}
GLuint texture;
glGenTextures(1,&texture);
glBindTexture(GL_TEXTURE_2D, texture);
glTexImage2D(GL_TEXTURE_2D,0,GL_RGBA,face->glyph->bitmap.width,face->glyph->bitmap.rows,0,GL_RGBA,GL_UNSIGNED_BYTE,face->glyph->bitmap.buffer);
//glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
//glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
//glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
Character character = {texture, {face->glyph->bitmap.width, face->glyph->bitmap.rows}, {face->glyph->bitmap_left, face->glyph->bitmap_top}, face->glyph->advance.x};
chars[index++] = character;
}
printf("Got characters\n");
}
I've commented out the glTexParameteri calls that didn't make any difference to the way the texture was displayed.
Here are the structures used to store the textures:
typedef struct vec2 {
float x;
float y;
} vec2;
typedef struct Character {
GLuint Texture;
vec2 Size;
vec2 Bearing;
GLuint Advance;
} Character;
Character chars[128];
Finally here is the code that displays a string passed to it:
void drawText(char* inString, float x, float y, float scale, colour col) {
for (char* string = inString; *string != '\0'; string++){
Character ch = chars[(int) *string];
glBindTexture( GL_TEXTURE_2D, ch.Texture);
printf("character\n");
float xpos = x + ch.Bearing.x * scale * 2 /glutGet(GLUT_WINDOW_WIDTH);
float ypos = y - (ch.Size.y - ch.Bearing.y) * scale * 2 /glutGet(GLUT_WINDOW_HEIGHT);
float w = ch.Size.x * scale * 2 /glutGet(GLUT_WINDOW_WIDTH);
float h = ch.Size.y * scale * 2 /glutGet(GLUT_WINDOW_HEIGHT);
//draws the textured QUAD
glBegin(GL_QUADS);
//set the colour of the quad the texutre blends with to white with the appropriate opacity
glColor4f(col.R, col.G, col.B, col.A);
glTexCoord2f(0.0,0.0);
glVertex2f(xpos, ypos);
glTexCoord2f(0.0,1.0);
glVertex2f(xpos, ypos+h);
glTexCoord2f(1.0,1.0);
glVertex2f(xpos+w, ypos+h);
glTexCoord2f(1.0,0.0);
glVertex2f(xpos+w, ypos);
glEnd();
x += (ch.Advance >> 6) * scale * 2 /glutGet(GLUT_WINDOW_WIDTH);
printf("w %1.0f\n",w);
printf("h %1.0f\n",h);
printf("x %1.0f\n",xpos);
printf("y %1.0f\n",ypos);
}
}
I expect the text to render normally but, as described earlier, each character looks like its own texture map.
This is the way the text looks (ignore the image behind it):
The buffer which is returned by FT_Load_Char contains one byte for each pixel of the glyph.
If you were using modern OpenGL, you could use the format GL_RED for both the bitmap format and the internal texture image format; the shader program would do the rest.
Since you use legacy OpenGL, you have to use a format that converts the byte value to an RGBA value. Use GL_LUMINANCE for that. GL_LUMINANCE converts each value "into an RGBA element by replicating the luminance value three times for red, green, and blue and attaching 1 for alpha".
glTexImage2D(
GL_TEXTURE_2D,
0,
GL_LUMINANCE,
face->glyph->bitmap.width,
face->glyph->bitmap.rows,
0,
GL_LUMINANCE,
GL_UNSIGNED_BYTE,
face->glyph->bitmap.buffer
);
Note that two-dimensional texturing has to be enabled, see glEnable:
glEnable(GL_TEXTURE_2D);
If you want to draw the text with a transparent background, then you can use GL_ALPHA:
glTexImage2D(
GL_TEXTURE_2D,
0,
GL_ALPHA,
face->glyph->bitmap.width,
face->glyph->bitmap.rows,
0,
GL_ALPHA,
GL_UNSIGNED_BYTE,
face->glyph->bitmap.buffer
);
When you draw the text, you have to enable blending:
void drawText(char* inString, float x, float y, float scale, colour col) {
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glEnable(GL_TEXTURE_2D);
// [...]
glDisable(GL_TEXTURE_2D);
glDisable(GL_BLEND);
}
Furthermore, glPixelStorei(GL_UNPACK_ALIGNMENT, 1); should be called before getChars();.
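For clarity, the initialisation from the question would then read (the same calls, only reordered):
FT_Init_FreeType(&ft);
FT_New_Face(ft, "Roboto.ttf", 0, &face);
FT_Set_Pixel_Sizes(face, 0, 100);
// set the unpack alignment before the 1-byte-per-pixel glyph bitmaps are uploaded
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
getChars();
FT_Done_Face(face);
FT_Done_FreeType(ft);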

Font rendering (Freetype) with openGL ES 2.0 combined with other line drawing functions does not work

This thread is related to https://stackoverflow.com/questions/50955558/render-fonts-with-sdl2-opengl-es-2-0-glsl-1-0-freetype
I have a problem combining font rendering and using this function as follows:
// Create VBO (Vertex Buffer Object) based on the vertices provided, render the vertices on the
// background buffer and eventually swap buffers to update the display.
// Return index of VBO buffer
GLuint drawVertices(SDL_Window *window, Vertex *vertices, GLsizei numVertices, int mode){
// Number of vertices elements must be provided as a param (see numVertices) because
// sizeof() cannot calculate the size of the type a pointer points to
//GLsizei vertSize = sizeof(vertices[0]);
//SDL_Log("Vertices size is %d, no of vertices is %d", vertSize, numVertices);
// Create a VBO (Vertex Buffer Object)
GLuint VBO = vboCreate(vertices, numVertices);
if (!VBO) {
// Failed. Error message has already been printed, so just quit
return (GLuint)NULL;
}
// Set up for rendering the triangle (activate the VBO)
GLuint positionIdx = 0; // Position is vertex attribute 0
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glVertexAttribPointer(positionIdx, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex), (const GLvoid*)0);
glEnableVertexAttribArray(positionIdx);
if (mode & CLEAR){
// Set color of the clear operation
glClearColor(0.0, 0.0, 0.0, 1.0);
// Clears the invisible buffer
glClear(GL_COLOR_BUFFER_BIT);
}
// Now draw!
// GL_POINTS = Draw only the pixels that correspond to the vertices coordinates
// GL_LINE_STRIP = Draw line that connects the vertices coordinates
// GL_LINE_LOOP = Draw line that connects the vertices coordinates plus a line that re-connects the last coordinate with the first
if (mode & RENDER){ glDrawArrays(GL_LINE_STRIP, 0, numVertices); }
// Don’t forget to flip the buffers as well, to display the final image:
// Update the window
if (mode & UPDATE){ SDL_GL_SwapWindow(window); }
return VBO;
}
This function uses glDrawArrays() to draw a series of lines connecting the provided vertices. Flags CLEAR, RENDER & UPDATE are being used to let me do something like:
drawVertices(window, vertices, sizeOfVertices, CLEAR | RENDER);
drawVertices(window, vertices, sizeOfVertices, RENDER);
drawVertices(window, vertices, sizeOfVertices, RENDER | UPDATE);
I did the same thing with the font rendering function, thus enabling me to draw multiple strings at various x,y coordinates. The next two functions do the font rendering based on the code I submitted in the first place and, of course, your corrections.
void render_text(const char *text, float x, float y, float sx, float sy) {
const char *p;
FT_GlyphSlot g = face->glyph;
SDL_Log("Debug info: glyph w: %d, glyph rows: %d", g->bitmap.width, g->bitmap.rows);
for(p = text; *p; p++) {
// If FT_Load_Char() returns a non-zero value then the glyph in *p could not be loaded
if(FT_Load_Char(face, *p, FT_LOAD_RENDER)){ continue; }
glTexImage2D(
GL_TEXTURE_2D,
0,
GL_RED,
g->bitmap.width,
g->bitmap.rows,
0,
GL_RED,
GL_UNSIGNED_BYTE,
g->bitmap.buffer
);
float x2 = x + g->bitmap_left * sx;
float y2 = -y - g->bitmap_top * sy;
float w = g->bitmap.width * sx;
float h = g->bitmap.rows * sy;
GLfloat box[4][4] = {
{x2, -y2 , 0, 0},
{x2 + w, -y2 , 1, 0},
{x2, -y2 - h, 0, 1},
{x2 + w, -y2 - h, 1, 1},
};
glBufferData(GL_ARRAY_BUFFER, sizeof box, box, GL_DYNAMIC_DRAW);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
x += (g->advance.x>>6) * sx;
y += (g->advance.y>>6) * sy;
}
}
void glRenderText(char *text, int _x, int _y, SDL_Color rgb, int mode) {
float x = _x;
float y = _y;
int w=0, h=0;
SDL_GetWindowSize(SDLmain.window, &w, &h);
float xMax = 2.0 / (float)w;
float yMax = 2.0 / (float)h;
GLuint color_loc = glGetUniformLocation( shaderProg, "color" );
float col[4] = { (float)rgb.r/255, (float)rgb.g/255, (float)rgb.b/255, 1 }; // colour from the rgb parameter, fully opaque
glUniform4fv( color_loc, 1, col);
// Clear invisible buffer
if (mode & CLEAR){ glClearColor(0.0, 0.0, 0.0, 1); glClear(GL_COLOR_BUFFER_BIT); }
// If coordinate system required is:
// COORD_SYS_CONVENTIONAL = (1) left bottom corner is 0,0 and moving towards right and top sides we reach max screen size in pixels e.g 1024 * 768 pixels
// COORD_SYS_CARTECIAN = (2) left bottom corner is -1,-1 and moving towards right and top sides we reach +1,+1 . The center of the display is always 0,0
if (mode & ~COORD_SYS_CARTECIAN){
x = (_x * xMax)-1;
y = (_y * yMax)-1;
}
// Draw the text on the invisible buffer
if (mode & RENDER){ render_text(text, x, y, xMax, yMax); }
// Update display
if (mode & UPDATE){ SDL_GL_SwapWindow(SDLmain.window); }
}
I therefore can do:
glRenderText(tmp, 0, 0, Olive, CLEAR | RENDER | UPDATE);
glRenderText(tmp, 0, 150, Yellow_Green, RENDER);
glRenderText(tmp, 0, 300, Light_Coral, RENDER | UPDATE);
It turns out that I can either render fonts at various x,y coordinates or use the function drawVertices() to render lines connecting those vertices, BUT not both. That is, I cannot do this:
glRenderText(tmp, 0, 0, Olive, CLEAR | RENDER);
glRenderText(tmp, 0, 150, Yellow_Green, RENDER);
glRenderText(tmp, 0, 300, Light_Coral, RENDER);
drawVertices(window, vertices, sizeOfVertices, RENDER);
drawVertices(window, vertices, sizeOfVertices, RENDER);
drawVertices(window, vertices, sizeOfVertices, RENDER | UPDATE);
As you can tell, the logic is that with either function you start with a CLEAR | RENDER flag, then do only RENDER, and on your last call to either function use RENDER | UPDATE.
ISSUES:
(1) My attempt to do the above, that is, combining glRenderText() + drawVertices(), failed because there is clearly something that needs to be set up before calling them one after the other.
(2) Another issue I am facing is that running the code on my Raspberry Pi 3 resulted in drawVertices() working fine, but when it comes to fonts I could only see the effect of glClearColor() & glClear(GL_COLOR_BUFFER_BIT), which means that the display was cleared with the colour set by glClearColor() but there was no font rendering to be seen. I tried both GL driver modes: there is one called Full KMS GL driver and another called Fake KMS GL driver.
Also, in order for drawVertices() to work I had to comment out the code provided below:
FT_Set_Pixel_Sizes(face, 0, 200);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
GLuint vbo;
GLuint attribute_coord=0;
glGenBuffers(1, &vbo);
glEnableVertexAttribArray(attribute_coord);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glVertexAttribPointer(attribute_coord, 4, GL_FLOAT, GL_FALSE, 0, 0);
I still had to keep the following code active:
// Load the shader program and set it for use
shaderProg = shaderProgLoad("shaderV1.vert", "shaderV1.frag");
GLuint tex_loc = glGetUniformLocation( shaderProg, "tex" );
GLuint color_loc = glGetUniformLocation( shaderProg, "color" );
// Activate the resulting shaders program
glUseProgram(shaderProg);
glUniform1i( tex_loc, 0 ); // 0, because the texture is bound to texture unit 0
// Define RGB color + Alpha
float col[4] = { 0.0f, 1.0f, 1.0, 1.0f };
glUniform4fv( color_loc, 1, col);
I was able to resolve the issue by resetting pretty much everything that is not common to the two operations (glRenderText() & drawVertices()).
The following code stays as is before calling either of the two functions (glRenderText() & drawVertices()). These two functions have been updated so that a proper reset is done before reaching the point where glDrawArrays() is executed.
// Load the shader program and set it for use
shaderProg = shaderProgLoad("shaderV1.vert", "shaderV1.frag");
// Activate the resulting shaders program
glUseProgram(shaderProg);
// After the shaders (vertex & fragment) have been compiled & linked into a program
// we can query the location index value of a uniform variable existing in the program.
// In this case we are querying uniform variables "tex" that exist in the fragment shader
GLuint tex_loc = glGetUniformLocation( shaderProg, "tex" );
// Set the value of the uniform variable "tex_loc" to 0, because the texture is bound to texture unit 0
glUniform1i( tex_loc, 0 );
This is the updated function that resets some options so that we get the result we need. For example, glDisable(GL_BLEND); is used to disable blending when drawing lines. Most important, of course, is that I use glBindBuffer() to select the appropriate buffer for use by OpenGL every time drawVertices() is called. glGenBuffers() is used only once, when the corresponding object name is 0, meaning that an unused object name has not yet been assigned to the vbo.
GLuint drawVertices(SDL_Window *window, GLuint vbo, Vertex *vertices, GLsizei numVertices, SDL_Color rgb, int mode){
float col[4] = { (float)rgb.r/255, (float)rgb.g/255, (float)rgb.b/255, 1.0 };
// Get an available object name for glBindBuffer() when object name is ZERO
if (!vbo){ glGenBuffers(1, &vbo); }
// Check for problems
GLenum err = glGetError();
// Deal with errors
if (err != GL_NO_ERROR) {
// Failed
glDeleteBuffers(1, &vbo);
SDL_Log("Creating VBO failed, code %u\n", err);
vbo = 0;
}
else if (!vbo) {
// Failed. Error message has already been printed, so just quit
return (GLuint)NULL;
}
if (mode & CLEAR){
// Set color of the clear operation
glClearColor(0.0, 0.0, 0.0, 1.0);
// Clears the invisible buffer
glClear(GL_COLOR_BUFFER_BIT);
}
if (mode & RENDER){
// Disable blending when drawing lines
glDisable(GL_BLEND);
// Set up for rendering the triangle (activate the vbo)
// Position is vertex attribute 0
GLuint attribute_coord = 0;
// Specifies the index of the generic vertex attribute and enables it
glEnableVertexAttribArray(attribute_coord);
// Set the buffer to be used from now on
glBindBuffer(GL_ARRAY_BUFFER, vbo);
// Define an array of generic vertex attribute data
glVertexAttribPointer(attribute_coord, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex), (const GLvoid*)0);
// Get the location of the uniform variable "color_loc"
GLuint color_loc = glGetUniformLocation( shaderProg, "color" );
// Set the value of the uniform variable "color_loc" to array "col"
glUniform4fv( color_loc, 1, col);
// Copy vertices into buffer
glBufferData(GL_ARRAY_BUFFER, sizeof(Vertex) * numVertices, vertices, GL_STATIC_DRAW);
// Now draw!
// GL_POINTS = Draw only the pixels that correspond to the vertices coordinates
// GL_LINE_STRIP = Draw line that connects the vertices coordinates
// GL_LINE_LOOP = Draw line that connects the vertices coordinates plus a line that re-connects the last coordinate with the first
// GL_TRIANGLE_FAN =
glDrawArrays(GL_LINE_STRIP, 0, numVertices);
}
// Don’t forget to flip the buffers as well, to display the final image:
// Update the window
if (mode & UPDATE){ SDL_GL_SwapWindow(window); }
return vbo;
}
Things work pretty much the same way for the glRenderText() function:
// render_text is called by glRenderText()
void render_text(const char *text, float x, float y, float sx, float sy) {
const char *p;
FT_GlyphSlot g = face->glyph;
//SDL_Log("Debug info: glyph w: %d, glyph rows: %d", g->bitmap.width, g->bitmap.rows);
for(p = text; *p; p++) {
// If FT_Load_Char() returns a non-zero value then the glyph in *p could not be loaded
if(FT_Load_Char(face, *p, FT_LOAD_RENDER)){ continue; }
glTexImage2D(
GL_TEXTURE_2D,
0,
GL_RED,
g->bitmap.width,
g->bitmap.rows,
0,
GL_RED,
GL_UNSIGNED_BYTE,
g->bitmap.buffer
);
float x2 = x + g->bitmap_left * sx;
float y2 = -y - g->bitmap_top * sy;
float w = g->bitmap.width * sx;
float h = g->bitmap.rows * sy;
GLfloat box[4][4] = {
{x2, -y2 , 0, 0},
{x2 + w, -y2 , 1, 0},
{x2, -y2 - h, 0, 1},
{x2 + w, -y2 - h, 1, 1},
};
glBufferData(GL_ARRAY_BUFFER, sizeof box, box, GL_DYNAMIC_DRAW);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
x += (g->advance.x>>6) * sx;
y += (g->advance.y>>6) * sy;
}
}
GLuint glRenderText(char *text, int fontSize, GLuint vbo, int _x, int _y, SDL_Color rgb, int mode) {
float x = _x;
float y = _y;
float xMax = 2.0 / (float)getWindowWidth();
float yMax = 2.0 / (float)getWindowHeight();
GLuint attribute_coord=0;
float col[4] = { (float)rgb.r/255, (float)rgb.g/255, (float)rgb.b/255, 1 };
// Enable blending when drawing fonts
glEnable(GL_BLEND);
// Set the W & H of the font loaded
FT_Set_Pixel_Sizes(face, 0, fontSize);
// If vbo is ZERO setup and get an object name
if (!vbo){
// Enables blending operations
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
// Set texture parameters
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
// Specifies the alignment requirements for the start of each pixel row in memory
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
// Save into vbo one unused buffer name (index) for use with glBindBuffer
glGenBuffers(1, &vbo);
// Specifies the index of the generic vertex attribute and enables it
glEnableVertexAttribArray(attribute_coord);
}
// Set the buffer to be used from now on to the one indicated by vbo
glBindBuffer(GL_ARRAY_BUFFER, vbo);
// Define an array of generic vertex attribute data
glVertexAttribPointer(attribute_coord, 4, GL_FLOAT, GL_FALSE, 0, 0);
GLuint color_loc = glGetUniformLocation( shaderProg, "color" );
// Set the value of the uniform variable "color_loc" from array "col"
glUniform4fv( color_loc, 1, col);
// Clear invisible buffer
if (mode & CLEAR){
glClearColor(0.0, 0.0, 0.0, 1);
glClear(GL_COLOR_BUFFER_BIT);
}
// If coordinate system required is:
// COORD_SYS_CONVENTIONAL = (1) left bottom corner is 0,0 and moving towards right and top sides we reach max screen size in pixels e.g 1024 * 768 pixels
// COORD_SYS_CARTECIAN = (2) left bottom corner is -1,-1 and moving towards right and top sides we reach +1,+1 . The center of the display is always 0,0
if (mode & ~COORD_SYS_CARTECIAN){
x = (_x * xMax)-1;
y = (_y * yMax)-1;
}
// Draw the text on the invisible buffer
if (mode & RENDER){ render_text(text, x, y, xMax, yMax); }
// Update display
if (mode & UPDATE){ SDL_GL_SwapWindow(SDLmain.window); }
return vbo;
}
The logic is that 2 VBOs (one for drawing lines through drawVertices() and one for drawing fonts through glRenderText()) are defined in main() or in the global scope and are passed to glRenderText() & drawVertices(). These two functions update their local copies and return the VBOs, so that the VBOs in main or in the global scope get updated. That could of course also be done by passing them by reference instead of adopting my approach.
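For illustration, the calling code could then look roughly like this (a sketch: window, vertices, sizeOfVertices and tmp come from the earlier snippets, while the font size and colour values are made up):
// Two VBO names, starting at 0 so the functions generate them on first use
GLuint textVbo = 0, lineVbo = 0;
SDL_Color Olive = { 128, 128, 0, 255 };
// Draw the text first (clearing the back buffer), then the lines, then swap once
textVbo = glRenderText(tmp, 100, textVbo, 0, 0, Olive, CLEAR | RENDER);
lineVbo = drawVertices(window, lineVbo, vertices, sizeOfVertices, Olive, RENDER | UPDATE);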
I have not yet tested the functionality on my Raspberry Pi 3 though; I will get back soon with an update. Anyway, the functions given above are fully functional.
Thanks again for your time.

deformed object when importing obj file to openGL code

I am relatively new to OpenGL and 3D drawing.
I drew a little person in Blender and exported its vertices to a .obj file so I can render that person using my C code.
This is the person I drew:
The problem
When I import it into my code I don't see a person any more, but a horrible ugly mess:
What I think
In the beginning, when I tested my software, I drew a cube and imported those vertices into my code. I saw that I only had 6 v-vertices; this gave me a partial cube (hence one side is not closed).
So maybe it has something to do with that.
What can I do to solve this issue? Where did I make a mistake? Does it have something to do with the projection or the export from Blender?
This is my C code
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <math.h>
#include <GL/glut.h>
#include "myVertices.h"
float xCamera = 3.0;
float yCamera = 3.0;
float zCamera = 0.0;
float barelRollCamera = 0.0;
void init(void)
{
glClearColor(1.0, 1.0, 1.0, 1.0);
glEnable(GL_DEPTH_TEST);
}
void drawVertex(GLfloat vertex[][3], int numberVertex, int shape)
{
int i;
glPushMatrix();
switch(shape)
{
case 0: glBegin(GL_LINES); break;
case 1: glBegin(GL_POLYGON); break;
default: break;
}
for(i=0;i<numberVertex;i++)
{
glVertex3fv(vertex[i]);
}
glEnd();
glPopMatrix();
}
void drawScene(void)
{
glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
gluLookAt(xCamera,yCamera,3.0, 0.0,0.0,0.0, 0.0,1.0,barelRollCamera);
glColor3f(0.0, 0.0, 0.0);
//draws axis
drawVertex(assen,6,0);
//draws person I drew in blender using the vertices in my header file
drawVertex(person,1038,1);
glFlush();
}
void herschaal(){
glViewport(0,0,500,500);
glLoadIdentity();
glMatrixMode(GL_PROJECTION);
glOrtho(-6.500, 6.500, -6.500, 6.500, -6.00, 12.00);
}
int main( int argc, char * argv[])
{
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB | GLUT_DEPTH );
glutInitWindowPosition(50, 100);
glutInitWindowSize(1000, 1000);
glutCreateWindow("test");
glutReshapeFunc(herschaal);
init();
glutDisplayFunc(drawScene);
glutMainLoop();
return 0;
}
the original export from blender: http://hastebin.com/owubizotuv.hs
the file with v-vertices I use, aka myVertices.h: http://hastebin.com/lirajuhiqe.avrasm

Boundary Fill Algorithm in Opengl

The algorithm goes into infinite recursion. It prints out blue lines on the polygon, but at a particular location (X=397, Y=98) the color doesn't get written to the pixel, and the code then recurses over the same pixel and its neighbors.
#include<GL/gl.h>
#include<GL/glu.h>
#include<GL/glut.h>
typedef struct pixel{ GLubyte red, green, blue; } pixel;
void boundaryfill(float x,float y, pixel fill, pixel boundary)
{
pixel c;
glReadPixels(x, y, 1, 1, GL_RGB, GL_UNSIGNED_BYTE, &c);
// printf("%d,%d,%d",(int)c.red,(int)c.green,(int)c.blue);
if ((c.red!=boundary.red)&&(c.red!=boundary.blue)&&(c.green!=boundary.green)&&(c.green!=fill.green)&&(c.blue!=fill.blue)&&(c.red!=fill.red)&&\
(x<=400))//&&(y<=100)&&(y>=50)&&(x>=200))
{
glBegin(GL_POINTS);
glColor3ub(fill.red,fill.green,fill.blue);
glVertex2f(x,y);
glEnd();
glFlush();
glReadPixels(x, y, 1, 1, GL_RGB, GL_UNSIGNED_BYTE, &c);
printf("\nCOlOR %d,%d,%d",(int)c.red,(int)c.green,(int)c.blue);
printf("\nX=%f,Y=%f",x,y)
//
boundaryfill(x+1,y,fill,boundary);
boundaryfill(x-1,y,fill,boundary);
boundaryfill(x,y+1,fill,boundary);
boundaryfill(x,y-1,fill,boundary);
}
}
void mydisplay()
{
glBegin(GL_POLYGON);
glColor3ub(10,10,10);
glVertex2f(200,50);
glVertex2f(200,100);
glVertex2f(400,100);
glVertex2f(400,50);
glEnd();
glFlush();
pixel fill,boundary;
fill.red=0;
fill.green=0;
fill.blue=255;
boundary.red=255;
boundary.green=255;
boundary.blue=255;
boundaryfill(300,75,fill,boundary);
glEnd();
glFlush();
}
void main(int argc,char **argv)
{
glutInit(&argc,argv);
glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB);
glutInitWindowSize(400,400);
glutInitWindowPosition(540,320);
glutCreateWindow("my first attempt");
glClearColor(1.0f,1.0f,1.0,0.0f);
glClear(GL_COLOR_BUFFER_BIT);
glutDisplayFunc(mydisplay);
gluOrtho2D(0.0,400.0,0.0,400.0);
glutMainLoop();
}
I suspect the problem is that you're overflowing the stack due to infinite recursion. What prevents the program from just calling boundaryfill forever?
Also, you're missing a call to glEnd inside boundaryfill, but I suspect that's not the reason for the crash.
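As an illustration of how that recursion could be avoided (my own sketch, not part of the answer above), the fill can be driven by an explicit stack of pixel coordinates; the 400x400 bounds come from the question's window size:
#define STACK_CAP (400 * 400)
// Iterative 4-connected boundary fill with the textbook stop condition:
// pending pixels are kept on an explicit stack instead of the call stack.
void boundaryfillIter(int x, int y, pixel fill, pixel boundary)
{
    static int stack[STACK_CAP][2];
    int top = 0;
    stack[top][0] = x; stack[top][1] = y; top++;
    while (top > 0) {
        pixel c;
        top--;
        x = stack[top][0];
        y = stack[top][1];
        if (x < 0 || y < 0 || x >= 400 || y >= 400) continue; // stay inside the window
        glReadPixels(x, y, 1, 1, GL_RGB, GL_UNSIGNED_BYTE, &c);
        // stop at boundary-coloured pixels and at pixels that are already filled
        if ((c.red == boundary.red && c.green == boundary.green && c.blue == boundary.blue) ||
            (c.red == fill.red && c.green == fill.green && c.blue == fill.blue))
            continue;
        glBegin(GL_POINTS);
        glColor3ub(fill.red, fill.green, fill.blue);
        glVertex2i(x, y);
        glEnd();
        glFlush();
        if (top + 4 <= STACK_CAP) { // push the 4 neighbours
            stack[top][0] = x + 1; stack[top][1] = y;     top++;
            stack[top][0] = x - 1; stack[top][1] = y;     top++;
            stack[top][0] = x;     stack[top][1] = y + 1; top++;
            stack[top][0] = x;     stack[top][1] = y - 1; top++;
        }
    }
}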
