Related: Perspective correct texturing of trapezoid in OpenGL ES 2.0
I'm trying to apply a square texture to a trapezoid-like shape in OpenGL, but I get some distortion. I've been reading a lot about possible solutions, and the one that seems most convenient requires modifying the "q" texture coordinate. The solutions I've found do this with the glTexCoord family of functions; however, I'm using vertex buffers and I don't know how to change that coordinate this way. The texture coordinate input in my GLSL shader is a vec2, so I have no idea how I would pass anything but two-dimensional texture coordinates to it.
main.c
//C libs
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <stdbool.h>
//GL libs (Need to have set up to run this)
#include <glad/glad.h>
#include <GLFW/glfw3.h>
//Libs in folder
#include "shader.h"
#include "image.h"
//Window consts
#define WINDOW_HEIGHT 500
#define WINDOW_WIDTH 500
#define WINDOW_NAME "ForStackOverflow"
//Shader consts
#define VERTEX_SHADER_FILE "vertex_shader.glsl"
#define FRAGMENT_SHADER_FILE "fragment_shader.glsl"
//Vertex constants
#define POSITION_ATTRIBUTE_LOC 0
#define TEXTURE_COORD_ATTRIBUTE_LOC 1
#define POSITION_SIZE 2
#define TEXTURE_COORD_SIZE 2
#define VERTEX_SIZE (POSITION_SIZE + TEXTURE_COORD_SIZE) //Amount of floats per vertex
#define POSITION_OFFSET 0
#define TEXTURE_COORD_OFFSET (POSITION_SIZE * sizeof(float))
#define STRIDE (sizeof(float) * VERTEX_SIZE)
//Functions
static void framebuffer_size_callback(GLFWwindow*, int, int);
static unsigned int load_bmp_texture(const char* name);
int main()
{
printf("Running!\n");
//*GLFW
if (!glfwInit())
{
printf("GLFW init fail\n");
return -1;
}
//3.3 core
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
//*Window object
GLFWwindow* window = glfwCreateWindow(WINDOW_WIDTH, WINDOW_HEIGHT, WINDOW_NAME, NULL, NULL);
if (window == NULL)
{
printf("GLFW window fail\n");
return -1;
}
glfwMakeContextCurrent(window);
//*GLAD
if (!gladLoadGLLoader((GLADloadproc) &glfwGetProcAddress))
{
printf("GLAD init fail");
glfwTerminate();
return -1;
}
//*Window
glViewport(0, 0, WINDOW_WIDTH, WINDOW_HEIGHT);
glfwSetFramebufferSizeCallback(window, &framebuffer_size_callback);
//*Shaders
ShaderProgram shader_program;
if (!shader_program_init(&shader_program, VERTEX_SHADER_FILE, FRAGMENT_SHADER_FILE)) {
glfwTerminate();
return -1;
}
//*Triangle rendering
//Vertices
float tri_vertices[4 * VERTEX_SIZE] = { //FORM A TRAPEZOID
//Position //Texture coordinates
-0.5f, 0.5f, 0.0f, 1.0f, //Top-left
-0.5f, -0.5f, 0.0f, 0.0f, //Bottom-left
0.5f, 0.75f, 1.0f, 1.0f, //Top-right
0.5f, -0.75f, 1.0f, 0.0f //Bottom-right
};
//Indices
unsigned int tri_indices[6] = {
2, 0, 1, //Top-right, top-left, bottom-left
2, 3, 1 //Top-right, bottom-right, bottom-left
};
//VAO
unsigned int tri_vao;
glGenVertexArrays(1, &tri_vao);
glBindVertexArray(tri_vao);
//VBO
unsigned int tri_vbo;
glGenBuffers(1, &tri_vbo);
glBindBuffer(GL_ARRAY_BUFFER, tri_vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(tri_vertices), tri_vertices, GL_STATIC_DRAW);
//EBO
unsigned int tri_ebo;
glGenBuffers(1, &tri_ebo);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, tri_ebo);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(tri_indices), tri_indices, GL_STATIC_DRAW);
//Config
//Position
glVertexAttribPointer(POSITION_ATTRIBUTE_LOC, POSITION_SIZE, GL_FLOAT, GL_FALSE, STRIDE, (void*) POSITION_OFFSET);
glEnableVertexAttribArray(POSITION_ATTRIBUTE_LOC);
//Texture coordinates
glVertexAttribPointer(TEXTURE_COORD_ATTRIBUTE_LOC, TEXTURE_COORD_SIZE, GL_FLOAT, GL_FALSE, STRIDE, (void*) TEXTURE_COORD_OFFSET);
glEnableVertexAttribArray(TEXTURE_COORD_ATTRIBUTE_LOC);
//*Textures
unsigned int brick_tex = load_bmp_texture("purple_bricks.bmp");
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, brick_tex);
//Attaching to uniform
glUseProgram(shader_program);
glUniform1i(glGetUniformLocation(shader_program, "brick"), 0);
//*Rendering setup
//Shader
glUseProgram(shader_program);
//*Main loop
while (!glfwWindowShouldClose(window))
{
//*Blittering
//Background
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
glClear(GL_COLOR_BUFFER_BIT);
//Triangles
glBindVertexArray(tri_vao);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, tri_ebo);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, (void*) 0);
//*Buffer, events
glfwSwapBuffers(window);
glfwPollEvents();
}
//*End program
glDeleteVertexArrays(1, &tri_vao);
glDeleteBuffers(1, &tri_vbo);
glDeleteBuffers(1, &tri_ebo);
glfwTerminate();
return 0;
}
//Centers the OpenGL part of the window and keeps the same width and height
static void framebuffer_size_callback(GLFWwindow* window, int width, int height) {
glViewport((width - WINDOW_WIDTH) / 2, (height - WINDOW_HEIGHT) / 2, WINDOW_WIDTH, WINDOW_HEIGHT);
}
//Loads and sets up a BMP texture
static unsigned int load_bmp_texture(const char* name) {
//Loading into array
RGBImage image;
read_bmp(name, &image);
//Generating texture in GL
unsigned int texture;
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
//Setting mapping
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT); //X
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT); //Y
//Setting filtering
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
//Setting texture and mipmap
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, image.width, image.height, 0, GL_RGB, GL_UNSIGNED_BYTE, image.data);
glGenerateMipmap(GL_TEXTURE_2D);
//Freeing image array
free_RGBImage(image);
return texture;
}
image.h
//Code for loading a bmp file as an array
//Definitely not part of the problem
//24 bit bmps
//C libs
#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>
#include <stdbool.h>
//Prevent struct packing
#pragma pack(1)
typedef struct RGB {
unsigned char R;
unsigned char G;
unsigned char B;
} RGB;
typedef struct RGBImage {
int width;
int height;
RGB* data;
} RGBImage;
typedef struct BMPFileHeader {
char name[2];
uint32_t size;
uint32_t garbage;
uint32_t image_offset;
} BMPFileHeader;
typedef struct BMPInfoHeader {
uint32_t header_size;
uint32_t width;
uint32_t height;
uint16_t color_planes;
uint16_t bits_per_pixel;
uint32_t compression;
uint32_t image_size;
} BMPInfoHeader;
void free_RGBImage(RGBImage image) {
free(image.data);
}
bool read_bmp(const char* file_name, RGBImage* image) {
FILE* fp = fopen(file_name, "rb");
if (fp == NULL) {
printf("Couldn't open %s\n", file_name);
return false;
}
BMPFileHeader file_header;
fread(file_header.name, sizeof(BMPFileHeader), 1, fp);
if ((file_header.name[0] != 'B') || (file_header.name[1] != 'M')) {
fclose(fp);
return false;
}
BMPInfoHeader info_header;
fread(&info_header, sizeof(BMPInfoHeader), 1, fp);
if ((info_header.header_size != 40) || (info_header.compression != 0) || (info_header.bits_per_pixel != 24)) {
fclose(fp);
return false;
}
fseek(fp, file_header.image_offset, SEEK_SET);
image->width = info_header.width;
image->height = info_header.height;
image->data = (RGB*) malloc(image->height * image->width * sizeof(RGB));
for (int i = 0; i < image->height; i++) {
fread(&image->data[i * image->width], sizeof(RGB), image->width, fp);
}
int R;
for (int i = 0; i < image->height * image->width; i++) {
R = image->data[i].R;
image->data[i].R = image->data[i].B;
image->data[i].B = R;
}
fclose(fp);
return true;
}
shader.h
//Code for compiling and linking a shader
//Definitely not part of the problem
//C libs
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <stdbool.h>
//GL libs
#include <glad/glad.h>
#include <GLFW/glfw3.h>
//Consts
#define INFO_LOG_SIZE 512
static bool _glad_is_init();
static bool _glfw_is_init();
typedef unsigned int ShaderProgram;
bool shader_program_init(ShaderProgram* shader_program, char* vertex_shader_file_name, char* fragment_shader_file_name)
{
if (!_glfw_is_init()) {
printf("Shader: glfw uninitialized\n");
return 0;
}
else if (!_glad_is_init()) {
printf("Shader: glad uninitialized\n");
return 0;
}
long int file_size;
size_t new_source_length;
FILE* fp;
//*Reading vertex shader file
//Open
fp = fopen(vertex_shader_file_name, "r");
if (fp == NULL) {
printf("Couldn't open vertex shader file\n");
return 0;
}
//Find length for buffer
fseek(fp, 0L, SEEK_END);
file_size = ftell(fp);
if (file_size == -1) {
printf("Couldn't seek end of file\n");
return 0;
}
rewind(fp);
char vertex_shader_source[(file_size + 1) * sizeof(char)];
//Read
new_source_length = fread(vertex_shader_source, sizeof(char), file_size, fp);
if (ferror(fp) != 0) {
printf("Error when reading file\n");
return 0;
}
//Add string termination
vertex_shader_source[new_source_length] = '\0';
//Close
fclose(fp);
//*Reading fragment shader
//Open
fp = fopen(fragment_shader_file_name, "r");
if (fp == NULL) {
printf("Couldn't open fragment shader file\n");
return 0;
}
//Find length for buffer
fseek(fp, 0L, SEEK_END);
file_size = ftell(fp);
if (file_size == -1) {
printf("Couldn't seek end of file\n");
return 0;
}
rewind(fp);
char fragment_shader_source[(file_size + 1) * sizeof(char)];
//Read
new_source_length = fread(fragment_shader_source, sizeof(char), file_size, fp);
if (ferror(fp) != 0) {
printf("Error reading file\n");
return 0;
}
//Add string termination
fragment_shader_source[new_source_length] = '\0';
//Close
fclose(fp);
//*Compiling
//For error checking
int success;
char infolog[INFO_LOG_SIZE];
const char* vertex_shader_code = vertex_shader_source; //vertex_shader_source is of type char foo[n], a VLA.
const char* fragment_shader_code = fragment_shader_source;
//Vertex
unsigned int vertex_shader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertex_shader, 1, &vertex_shader_code, NULL);
glCompileShader(vertex_shader);
glGetShaderiv(vertex_shader, GL_COMPILE_STATUS, &success);
if (!success) {
glGetShaderInfoLog(vertex_shader, INFO_LOG_SIZE, NULL, infolog);
printf("Vertex shader compile fail: \n%s\n", infolog);
return 0;
}
//Fragment
unsigned int fragment_shader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragment_shader, 1, &fragment_shader_code, NULL);
glCompileShader(fragment_shader);
glGetShaderiv(fragment_shader, GL_COMPILE_STATUS, &success);
if (!success) {
glGetShaderInfoLog(fragment_shader, INFO_LOG_SIZE, NULL, infolog);
printf("Fragment shader compile fail: \n%s\n", infolog);
return 0;
}
//Program
*shader_program = glCreateProgram();
glAttachShader(*shader_program, vertex_shader);
glAttachShader(*shader_program, fragment_shader);
glLinkProgram(*shader_program);
glGetProgramiv(*shader_program, GL_LINK_STATUS, &success);
if (!success) {
glGetProgramInfoLog(*shader_program, INFO_LOG_SIZE, NULL, infolog);
printf("Shader program compile fail: \n%s\n", infolog);
return 0;
}
//Deleting
glDeleteShader(vertex_shader);
glDeleteShader(fragment_shader);
return 1;
}
bool _glfw_is_init() {
if (!glfwInit())
{
return false;
}
return true;
}
bool _glad_is_init() {
if (!gladLoadGLLoader((GLADloadproc) &glfwGetProcAddress))
{
return false;
}
return true;
}
vertex_shader.glsl
#version 330 core
layout (location = 0) in vec2 vertex_position;
layout (location = 1) in vec2 vertex_texture_coordinate;
out vec2 texture_coordinate;
void main()
{
gl_Position = vec4(vertex_position, 1.0, 1.0);
texture_coordinate = vertex_texture_coordinate;
};
fragment_shader.glsl
#version 330 core
in vec4 color;
in vec2 texture_coordinate;
out vec4 FragColor;
uniform sampler2D brick;
void main()
{
FragColor = texture(brick, texture_coordinate);
};
Texture used
Result of program
Edit: for those reading in the future, this link helped a lot with implementing the method described in the answer below:
https://www.cs.cmu.edu/~16385/s17/Slides/10.2_2D_Alignment__DLT.pdf
Given the 2D vertex coordinates P[i] and the 2D texture coordinates T[i], you need to find the homography that maps T[i] to P[i]. The homography H is represented by a 3x3 matrix (up to a scaling factor) and can be calculated, for example, with the direct linear transform (DLT) method. Beware that it involves solving a system of eight equations in eight (or nine) unknowns -- so it's best to resort to an existing library, like LAPACK, for that.
The homography satisfies the relations
H * T[i] ~ P[i]
as an equivalence on the projective plane, i.e.
H * (T[i], 1) = a[i] * (P[i], 1)
for some scaling factors a[i].
The trick to getting the correct texture mapping is to replace the 2D vertex coordinates with 3D homogeneous ones:
homogeneous P[i] = H * (T[i], 1)
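For a single quad the system is small enough to set up by hand. The following is a minimal sketch of that idea (my own illustration, not code from the answer): it fixes h8 = 1, solves the remaining 8x8 system with naive Gaussian elimination, and then evaluates H * (T[i], 1) for a vertex. For anything beyond a quick test, an existing library such as LAPACK remains the safer route, as noted above.
#include <math.h>
#include <stdbool.h>
/* Solve the augmented 8x9 system a|b in place; returns false if singular. */
static bool solve8x8(double a[8][9], double x[8])
{
    for (int col = 0; col < 8; col++) {
        /* Partial pivoting: pick the row with the largest entry in this column. */
        int pivot = col;
        for (int row = col + 1; row < 8; row++)
            if (fabs(a[row][col]) > fabs(a[pivot][col]))
                pivot = row;
        if (fabs(a[pivot][col]) < 1e-12)
            return false;
        for (int k = 0; k <= 8; k++) {
            double tmp = a[col][k]; a[col][k] = a[pivot][k]; a[pivot][k] = tmp;
        }
        /* Eliminate this column below the pivot row. */
        for (int row = col + 1; row < 8; row++) {
            double f = a[row][col] / a[col][col];
            for (int k = col; k <= 8; k++)
                a[row][k] -= f * a[col][k];
        }
    }
    /* Back substitution. */
    for (int row = 7; row >= 0; row--) {
        double s = a[row][8];
        for (int k = row + 1; k < 8; k++)
            s -= a[row][k] * x[k];
        x[row] = s / a[row][row];
    }
    return true;
}
/* tex[i] = T[i] = (u, v), pos[i] = P[i] = (x, y). out_h receives the nine entries
 * of H in row-major order, with h[8] fixed to 1 (valid as long as the true h8 is
 * nonzero, which it is for a mapping between finite quads like this one). */
static bool find_homography(float tex[4][2], float pos[4][2], double out_h[9])
{
    double a[8][9] = {{0}};
    for (int i = 0; i < 4; i++) {
        double u = tex[i][0], v = tex[i][1];
        double x = pos[i][0], y = pos[i][1];
        double *rx = a[2 * i];     /* h0*u + h1*v + h2 - x*(h6*u + h7*v) = x */
        rx[0] = u; rx[1] = v; rx[2] = 1.0; rx[6] = -x * u; rx[7] = -x * v; rx[8] = x;
        double *ry = a[2 * i + 1]; /* h3*u + h4*v + h5 - y*(h6*u + h7*v) = y */
        ry[3] = u; ry[4] = v; ry[5] = 1.0; ry[6] = -y * u; ry[7] = -y * v; ry[8] = y;
    }
    if (!solve8x8(a, out_h))
        return false;
    out_h[8] = 1.0;
    return true;
}
/* Homogeneous position for one vertex: (X, Y, W) = H * (u, v, 1). */
static void homogeneous_position(const double h[9], float u, float v, float out[3])
{
    out[0] = (float)(h[0] * u + h[1] * v + h[2]);
    out[1] = (float)(h[3] * u + h[4] * v + h[5]);
    out[2] = (float)(h[6] * u + h[7] * v + h[8]);
}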
The vertex shader will then receive a vec3 vertex position, and copy the 3rd coordinate to gl_Position.w:
layout (location = 0) in vec3 vertex_position;
...
void main() {
gl_Position = vec4(vertex_position.xy, 0, vertex_position.z);
...
}
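Applied to the buffers in the question, POSITION_SIZE becomes 3 (STRIDE and TEXTURE_COORD_OFFSET in main.c are derived from it, so they follow along), and each stored position is the full result of H * (T[i], 1): x and y come out premultiplied by the per-vertex factor a[i], and the rasterizer's perspective divide undoes that again while making the texture coordinate interpolation perspective correct. A rough sketch, reusing the hypothetical find_homography/homogeneous_position helpers from the previous snippet:
//Sketch only: fill the interleaved buffer with (x*w, y*w, w, u, v) per vertex
float positions[4][2] = {
    { -0.5f,  0.5f  }, //Top-left
    { -0.5f, -0.5f  }, //Bottom-left
    {  0.5f,  0.75f }, //Top-right
    {  0.5f, -0.75f }  //Bottom-right
};
float tex_coords[4][2] = {
    { 0.0f, 1.0f }, { 0.0f, 0.0f }, { 1.0f, 1.0f }, { 1.0f, 0.0f }
};
double h[9];
if (!find_homography(tex_coords, positions, h)) {
    //degenerate quad; fall back to the plain 2D positions
}
float tri_vertices[4 * 5]; //3 position floats + 2 texture coordinate floats per vertex
for (int i = 0; i < 4; i++) {
    float *v = &tri_vertices[i * 5];
    homogeneous_position(h, tex_coords[i][0], tex_coords[i][1], v); //writes x*w, y*w, w
    v[3] = tex_coords[i][0];
    v[4] = tex_coords[i][1];
}
//The position attribute then uses 3 components, e.g.:
//glVertexAttribPointer(POSITION_ATTRIBUTE_LOC, 3, GL_FLOAT, GL_FALSE, 5 * sizeof(float), (void*) 0);
//glVertexAttribPointer(TEXTURE_COORD_ATTRIBUTE_LOC, 2, GL_FLOAT, GL_FALSE, 5 * sizeof(float), (void*) (3 * sizeof(float)));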
Here are the results I get with this method (corrected/uncorrected):
I have a very simple example of an OpenGL ES program that I'm trying to get to run on RaspiOS Desktop (a.k.a. Raspbian) on a Raspberry Pi 4.
My goal is very simple: to draw a red triangle in the center of the screen. However, the triangle comes out white instead of red.
I've searched and tried everything but wasn't able to find any help. I'm very frustrated at this point because this was supposed to be the first tutorial introducing me to the world of OpenGL ES, and I'm already stuck and can't continue with more complicated examples.
Anyway, here's the full example:
#include <GL/glew.h>
#include <GL/freeglut.h>
#include <stdio.h>
#include <stdbool.h>
static struct glData {
GLuint program;
GLuint vbo;
} glData;
const char vert_shader_source[] = "#version 300 es \n"
"precision mediump float; \n"
"layout (location = 0) in vec3 Position; \n"
"void main() \n"
"{ \n"
" gl_Position = vec4(Position, 1.0); \n"
"} \n";
const char frag_shader_source[] = "#version 300 es \n"
"precision mediump float; \n"
"out vec4 fragColor; \n"
"void main() \n"
"{ \n"
" fragColor = vec4(1.0f, 0.0f, 0.0f, 1.0f); \n"
"} \n";
#define POSITION 0
bool initWindow(int* argc, char** argv)
{
glutInit(argc, argv);
glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGB | GLUT_MULTISAMPLE);
glutCreateWindow("Triangle");
GLenum glew_status = glewInit();
if (glew_status != GLEW_OK) {
fprintf(stderr, "Error: %s\n", glewGetErrorString(glew_status));
return false;
}
return true;
}
static GLuint buildShader(const char* shader_source, GLenum type)
{
GLuint shader;
GLint status;
shader = glCreateShader(type);
if (shader == 0) {
return 0;
}
glShaderSource(shader, 1, &shader_source, NULL);
glCompileShader(shader);
glGetShaderiv(shader, GL_COMPILE_STATUS, &status);
if (status != GL_TRUE) {
int length;
char* log;
fprintf(stderr, "failed to compile shader\n");
glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &length);
if (length > 1) {
log = calloc(length, sizeof(char));
glGetShaderInfoLog(shader, length, &length, log);
fprintf(stderr, "%s\n", log);
free(log);
}
return false;
}
return true;
}
static GLuint createAndLinkProgram(GLuint v_shader, GLuint f_shader)
{
GLuint program;
GLint linked;
program = glCreateProgram();
if (program == 0) {
fprintf(stderr, "failed to create program\n");
return 0;
}
glAttachShader(program, v_shader);
glAttachShader(program, f_shader);
glLinkProgram(program);
glGetProgramiv(program, GL_LINK_STATUS, &linked);
if (linked != GL_TRUE) {
int length;
char* log;
fprintf(stderr, "failed to link program\n");
glGetProgramiv(program, GL_INFO_LOG_LENGTH, &length);
if (length > 1) {
log = calloc(length, sizeof(char));
glGetProgramInfoLog(program, length, &length, log);
fprintf(stderr, "%s\n", log);
free(log);
}
glDeleteProgram(program);
return 0;
}
return program;
}
static bool initProgram()
{
GLuint v_shader, f_shader;
v_shader = buildShader(vert_shader_source, GL_VERTEX_SHADER);
if (v_shader == 0) {
fprintf(stderr, "failed to build vertex shader\n");
return false;
}
f_shader = buildShader(frag_shader_source, GL_FRAGMENT_SHADER);
if (f_shader == 0) {
fprintf(stderr, "failed to build fragment shader\n");
glDeleteShader(v_shader);
return false;
}
glReleaseShaderCompiler(); // should release resources allocated for the compiler
glData.program = createAndLinkProgram(v_shader, f_shader);
if (glData.program == 0) {
fprintf(stderr, "failed to create and link program\n");
glDeleteShader(v_shader);
glDeleteShader(f_shader);
return false;
}
glUseProgram(glData.program);
// this only flags the shaders for deletion; they are freed once the program no longer references them, but it's good practice
glDeleteShader(v_shader);
glDeleteShader(f_shader);
return true;
}
bool setupOpenGL()
{
if (!initProgram()) {
fprintf(stderr, "failed to initialize program\n");
return false;
}
GLfloat vVertices[] = {
-0.5f, -0.5f, 0.0f,
0.0f, 0.5f, 0.0f,
0.5f, -0.5f, 0.0f,
};
glClearColor(0, 0, 0, 1);
glGenBuffers(1, &glData.vbo);
glBindBuffer(GL_ARRAY_BUFFER, glData.vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vVertices), vVertices, GL_STATIC_DRAW);
return true;
}
void reshape(int width, int height)
{
glViewport(0, 0, width, height);
}
void drawTriangle()
{
glClear(GL_COLOR_BUFFER_BIT);
glEnableVertexAttribArray(POSITION);
glBindBuffer(GL_ARRAY_BUFFER, glData.vbo);
glVertexAttribPointer(POSITION, 3, GL_FLOAT, GL_FALSE, 0, 0);
glDrawArrays(GL_TRIANGLES, 0, 3);
glDisableVertexAttribArray(POSITION);
glutSwapBuffers();
}
int main(int argc, char** argv)
{
printf("initialize window\n");
if (!initWindow(&argc, argv)) {
fprintf(stderr, "failed to initialize window\n");
return EXIT_FAILURE;
}
printf("setup opengl\n");
if (!setupOpenGL()) {
fprintf(stderr, "failed to setup opengl\n");
return EXIT_FAILURE;
}
glutDisplayFunc(drawTriangle);
glutReshapeFunc(reshape);
glutMainLoop();
glDeleteProgram(glData.program);
return EXIT_SUCCESS;
}
Before you run it, you need to:
Run sudo raspi-config
Go to Advanced Options > GL Driver
Enable GL (Fake KMS)
Reboot
Then you can compile and run it like this:
gcc triangle.c -Wall -lm -lglut -lGLEW -lGL -o triangle
./triangle
At first I thought maybe it was some bug in the driver. But then I found this example, tried to run it, and it draws some graphs with multiple colors just fine.
I'd appreciate any help. I've been trying to debug this for days now.
Nvm, turns out I'm an idiot.
This entire time it was a simple typo in the buildShader() function. Here's the fixed version of that function:
static GLuint buildShader(const char* shader_source, GLenum type)
{
GLuint shader;
GLint status;
shader = glCreateShader(type);
if (shader == 0) {
return 0;
}
glShaderSource(shader, 1, &shader_source, NULL);
glCompileShader(shader);
glGetShaderiv(shader, GL_COMPILE_STATUS, &status);
if (status != GL_TRUE) {
int length;
char* log;
fprintf(stderr, "failed to compile shader\n");
glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &length);
if (length > 1) {
log = calloc(length, sizeof(char));
glGetShaderInfoLog(shader, length, &length, log);
fprintf(stderr, "%s\n", log);
free(log);
}
return 0;
}
return shader;
}
The problem was that I was accidentally returning true/false instead of the shader. I have absolutely no idea how the program still managed to run without an error and display a white triangle, but that's how it is.
I'm using OpenGL 3 and GLEW to draw a triangle. I have a window (changing the background color works fine), but I can't get my shader to draw on it. I did some tests like:
glGetProgramiv(shader_programme, GL_LINK_STATUS, &isLinked);
printf("\nProg : %i",isLinked);
And it's fine; the print shows 1 for the program, the vertex shader and the fragment shader.
I suppose I missed a clear somewhere, but I'm not sure and I'm also pretty lost here...
This is my code:
#include "../include/scop.h"
#include <OpenGL/gl.h>
#include ".../lfw3/3.2.1/include/GLFW/glfw3.h"
t_scop *ft_init_window(t_scop *scop, t_parse parse)
{
if (!glfwInit())
ft_putstr("error init");
else
{
glfwWindowHint(GLFW_SAMPLES, 4);
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
WIN = glfwCreateWindow(WIN_X, WIN_Y, "Scop", NULL, NULL);
glfwMakeContextCurrent(WIN);
glfwSetInputMode(WIN, GLFW_STICKY_KEYS, GL_TRUE);
glfwSetInputMode(WIN, GLFW_CURSOR, GLFW_CURSOR_DISABLED);
glfwPollEvents();
glfwSetCursorPos(WIN, WIN_X / 2.0, WIN_Y / 2.0);
glClearColor(0.0f, 0.5f, 0.4f, 0.0f);
glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_LESS);
}
float points[] = {
-1.0f, -1.0f, 0.0f,
1.0f, -1.0f, 0.0f,
0.0f, 1.0f, 0.0f
};
//init buffer and fill it
GLuint vbo = 0;
glGenBuffers (1, &vbo);
glBindBuffer (GL_ARRAY_BUFFER, vbo);
glBufferData (GL_ARRAY_BUFFER, 9 * sizeof (float), points, GL_STATIC_DRAW);
//init VertexArray
GLuint vao = 0;
glGenVertexArraysAPPLE (1, &vao);
glBindVertexArrayAPPLE (vao);
glEnableVertexAttribArray (0);
glBindBuffer (GL_ARRAY_BUFFER, vbo);
glVertexAttribPointer (0, 3, GL_FLOAT, GL_FALSE, 0, NULL);
glDrawArrays(GL_TRIANGLES, 0, 3);
const char* vertex_shader =
"#version 330 core\n"
"layout (location = 0) in vec3 position;\n"
"void main () {"
"gl_Position.xyz = position;"
"gl_Position.w = 1.0;"
"}\0";
const char* fragment_shader =
"#version 330 core\n"
"out vec3 color;"
"void main () {"
"color = vec3(1,0,0);"
"}\0";
//create vertex
GLuint vs = glCreateShader (GL_VERTEX_SHADER);
glShaderSource (vs, 1, &vertex_shader, NULL);
glCompileShader (vs);
//tests
GLint success = 0;
glGetShaderiv(vs, GL_COMPILE_STATUS, &success);
printf ("Taille du source:%i\n", success);
if (GL_FALSE == success)
printf("false");
else printf("true");
//create frag
GLuint fs = glCreateShader (GL_FRAGMENT_SHADER);
glShaderSource (fs, 1, &fragment_shader, NULL);
glCompileShader (fs);
//tests
success = 0;
glGetShaderiv(fs, GL_COMPILE_STATUS, &success);
printf("Taille fs : %i",success);
// GLuint shader_programme = LoadShaders (vs,fs);
GLint shader_programme = glCreateProgram ();
glAttachShader (shader_programme, vs);
glAttachShader (shader_programme, fs);
glLinkProgram (shader_programme);
//tests
GLint isLinked = 0;
glGetProgramiv(shader_programme, GL_LINK_STATUS, &isLinked);
printf("\nProg : %i",isLinked);
//idk if i need to do this now
glDetachShader(shader_programme, vs);
glDetachShader(shader_programme, fs);
glDeleteShader(vs);
glDeleteShader(fs);
glGetError();
while (!glfwWindowShouldClose(WIN))
{
glClear (GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glClearDepth(GL_DEPTH_TEST);
glUseProgram (shader_programme);
glBindVertexArrayAPPLE (vao);
glDrawArrays (GL_TRIANGLES, 0, 3);
//glUseProgram(0); ???
glfwPollEvents ();
glBindVertexArrayAPPLE (0);
glfwSwapBuffers(WIN);
}
// glfwTerminate();
return (scop);
}
Any help is greatly appreciated!
The problem lies in this line:
glClearDepth(GL_DEPTH_TEST);
glClearDepth (doc) specifies the value the depth buffer is cleared with and expects a floating point value between 0 and 1. It is similar to glClearColor, just for depth.
Additionally, you should be using the core-profile VAO functions instead of the ones from the APPLE extension. The APPLE extension should only be used in an OpenGL context <= 2.1.
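A minimal sketch of both fixes together (my own illustration using the names from the question, not tested in the asker's project):
//During setup, once:
glClearDepth(1.0);                 //value the depth buffer is cleared to, in [0, 1]
glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_LESS);
//Core-profile VAO calls instead of the APPLE variants:
GLuint vao = 0;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
//In the render loop, no glClearDepth(GL_DEPTH_TEST) call:
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram(shader_programme);
glBindVertexArray(vao);
glDrawArrays(GL_TRIANGLES, 0, 3);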
OK, I'm getting into OpenGL 2.1 (without the fixed-function stuff) and I'm having trouble with shaders. I declare my uniform variable in my shader, and in my program I get the uniform location and assign it a value with glUniform, but it doesn't seem to work.
These are my shaders
fragment shader:
#version 120
varying float color;
void
main ()
{
gl_FragColor = vec4(color, 0, 0, 1);
}
and my vertex shader:
#version 120
attribute vec2 position;
varying float color;
uniform float pr_color;
void
main ()
{
color = pr_color;
gl_Position = vec4(position, 0.0, 1.0);
}
This is how I'm passing the data to the shader:
void
display ()
{
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(g_program);
pos_loc = glGetAttribLocation(g_program, "position");
col_loc = glGetUniformLocation(g_program, "pr_color");
glUniform1f(col_loc, 1.0);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glEnableVertexAttribArray(pos_loc);
glVertexAttribPointer(pos_loc, 2, GL_FLOAT, GL_FALSE, 0, BUFFER_OFFSET(0));
glDrawArrays(GL_TRIANGLES, 0, 3);
glutPostRedisplay();
glutSwapBuffers();
}
And I don't think this is needed, but in any case, here's my GL init function:
void
init ()
{
// Set clear color to black
glClearColor(0.0,0.0,0.0,0.0);
vshader = createShader(GL_VERTEX_SHADER, "vertex.glsl");
fshader = createShader(GL_FRAGMENT_SHADER, "fragment.glsl");
g_program = createProgram(vshader, fshader);
// Create vbo and send it the vertex data
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
}
The thing is, my shaders are working: if I hardcode the color into the fragment shader, the triangle is drawn. But if I pass the color through the vertex shader, it doesn't work; the triangle is drawn black. Which is strange, because my position attribute works, yet my color uniform seems to stay at 0. As you can see, I'm calling glUseProgram() before setting the uniform value.
EDIT: I changed glUniform1i(col_loc, 1) to glUniform1f(col_loc, 1.0). Still doesn't work.
EDIT: I'll add my shader loading function to make sure the problem is not in there:
GLuint
createShader (GLenum type, char* filename)
{
GLuint shader;
// Load file
FILE* file;
file = fopen(filename, "r");
if (file == NULL)
{
printf("Error reading file \n");
}
// Get Length
fseek(file, 0, SEEK_END);
long length = ftell(file);
fseek(file, 0, SEEK_SET);
// Get source
char* source;
source = malloc( (size_t) length + 1);
if (source == NULL)
{
printf("Error alocating space for shader\n");
}
// Read file
fread(source, 1, length, file);
// Close file
fclose(file);
source[length] = '\n';
// Create shader, attach it's source and compile it
shader = glCreateShader(type);
glShaderSource(shader, 1, (const GLchar*)&source, &length);
free(source); // Free shader source, once it's attached
glCompileShader(shader);
// Check for errors
GLint shader_status;
glGetShaderiv(shader, GL_COMPILE_STATUS, &shader_status);
if (!shader_status) {
fprintf(stderr, "Failed to compile %s:\n", filename);
GLint log_length;
char *log;
glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &log_length);
log = malloc(log_length);
glGetShaderInfoLog(shader, log_length, NULL, log);
fprintf(stderr, "%s", log);
free(log);
glDeleteShader(shader);
return 0;
}
return shader;
}
When there's an error in the shader, the program actually prints the error log, so I don't think the error is in here, but anyway, here it is.
EDIT: program linker code
GLuint
createProgram (GLuint vertexs, GLuint fragments)
{
GLint program_ok;
// Create program and attach to shaders
GLuint program = glCreateProgram();
glAttachShader(program, vertexs);
glAttachShader(program, fragments);
glLinkProgram(program);
glGetProgramiv(program, GL_LINK_STATUS, &program_ok);
if (!program_ok) {
fprintf(stderr, "Failed to link shader program:\n");
GLint log_length;
char *log;
glGetProgramiv(program, GL_INFO_LOG_LENGTH, &log_length);
log = malloc(log_length);
glGetProgramInfoLog(program, log_length, NULL, log);
fprintf(stderr, "%s", log);
free(log);
glDeleteProgram(program);
return 0;
}
return program;
}
Please tell me if there's anything wrong with my shaders/program. I was just starting to get the basics of the programmable pipeline, and now I can't even render a triangle.
You are using
glUniform1i(col_loc, 1);
to set the value of
uniform float pr_color;
1i stands for 1 int. Use 1f for 1 float.
glUniform1f(col_loc, 1.0f);
OK, I got it working with glUniform4fv(). I am now passing a vec4 to the shader's color variable and it works!
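For reference, a guess at what that variant looks like (hypothetical, reusing the poster's existing names): the pr_color uniform and the color varying become vec4 in the shaders, and the C side passes a pointer to four floats:
GLfloat red[4] = { 1.0f, 0.0f, 0.0f, 1.0f };
col_loc = glGetUniformLocation(g_program, "pr_color");
glUniform4fv(col_loc, 1, red); //count = 1 vec4, sourced from the red[] array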
I am trying to render a single character as a demo in OpenGL, using the bitmap buffer offered by FreeType glyphs. I know my triangle fan is correct, because right now I see a black textured triangle fan against a green background. Ideally I should be seeing a character inside my triangle fan primitive instead of a solid black square.
void fontDataNums_init(const char * fname) {
float h = font.h = 16;
/*Dynamically allocated variables, clean before exit */
FT_Face face;
FT_Library library;
GLubyte * expanded_data;
/* Create And Initialize A FreeType Font Library. */
if(FT_Init_FreeType( &library )) {
printf("fontDataNums_init::FT_Init_FreeType failed\n");
exit(1);
}
/* Initialize face, load font from ttf file */
if(FT_New_Face( library, fname, 0, &face )){
printf("fontDataNums_init::FT_New_Face failed\n");
exit(1);
}
if(FT_Set_Char_Size( face, h * 64, h * 64, 96, 96)){
printf("fontDataNums_init::FT_Set_Char_Size failed.\n");
exit(1);
}
font.textures = (GLuint *) malloc(sizeof(GLuint) * 10);
glGenTextures(10, font.textures);
/* CREATE CHARACTER BITMAPS I WANT LOADED */
unsigned char g;
int i, j;
for( g='A'; g < 'J'; g++){
if(FT_Load_Char(face, g, FT_LOAD_RENDER)){
printf("fontDataNums::FT_Load_Char unable to load glyph for character\n");
exit(1);
}
FT_Glyph glyph;
if(FT_Get_Glyph(face->glyph, &glyph) ) { printf("GetGlyph failed.\n");}
if(FT_Glyph_To_Bitmap(&glyph, ft_render_mode_normal, 0, 1)){
printf("fontDataNums::FT_Glyph_To_Bitmap failed to create bitmap.\n");
exit(1);
}
FT_BitmapGlyph bitmap_glyph = (FT_BitmapGlyph)glyph;
int width = next_p2( bitmap_glyph->bitmap.width );
int height = next_p2( bitmap_glyph->bitmap.rows );
printf("WIDTH: %i and HEIGHT: %i \n", width, height);
/* PADDING FOR BITMAP */
expanded_data = (GLubyte *) malloc(sizeof(GLubyte) * 2 * width * height);
for(j=0; j <height;j++) {
for(i=0; i < width; i++){
expanded_data[2*(i+j*width)] = expanded_data[2*(i+j*width)+1] =
(i>=bitmap_glyph->bitmap.width || j>=bitmap_glyph->bitmap.rows) ?
0 :
bitmap_glyph->bitmap.buffer[i + bitmap_glyph->bitmap.width*j];
}
}
/* LOAD TEXTURE INTO OPENGL */
glActiveTexture(GL_TEXTURE0);
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glBindTexture( GL_TEXTURE_2D, font.textures[g]);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexImage2D( GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, expanded_data );
free(expanded_data);
FT_Done_Glyph(glyph);
}
/* Clean Up */
FT_Done_Face(face);
FT_Done_FreeType(library);
}
int next_p2 (int a )
{
int rval=1;
/* rval<<=1 Is A Prettier Way Of Writing rval*=2; */
while(rval<a) rval<<=1;
return rval;
}
void drawGlyph(){
renderGlyph(font.textures[1]);
}
void renderGlyph(GLuint textureName) {
GLuint tbo = 0;
GLuint vbo = 0;
glClear(GL_COLOR_BUFFER_BIT);
/* SETUP VERTICES */
GLfloat verts[8]={ 0.0f, 16.0f,
0.0f, 0.0f,
17.0f , 0.0f,
17.0f , 16.0f};
glEnableVertexAttribArray(GLT_ATTRIBUTE_VERTEX);
if(vbo == 0){glGenBuffers(1, &vbo);}
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(GLfloat) * 8, verts, GL_DYNAMIC_DRAW);
glVertexAttribPointer(GLT_ATTRIBUTE_VERTEX, 2, GL_FLOAT, GL_FALSE, 0, 0);
/* Setup Texture Buffer */
float x = 17.0f / 32.0f;
float y = 16.0f / 16.0f;
GLfloat vTex[8] = { 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 1.0f, 1.0f };
glEnableVertexAttribArray(GLT_ATTRIBUTE_TEXTURE0);
if(tbo == 0) { glGenBuffers(1, &tbo);}
glBindBuffer(GL_ARRAY_BUFFER, tbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(GLfloat) * 8, vTex, GL_DYNAMIC_DRAW);
glVertexAttribPointer(GLT_ATTRIBUTE_TEXTURE0, 2, GL_FLOAT, GL_FALSE, 0, 0);
/*Create Shaders*/
static const char *szIdentityShaderVP =
"#version 330\n"
"in vec4 vVertex;\n"
"in vec2 TexCoords;\n"
"out vec2 varyingTexCoords;\n"
"uniform mat4 mvp;\n"
"void main(void) \n"
"{"
"varyingTexCoords = TexCoords;\n"
"gl_Position = mvp * vVertex;\n"
"}\n";
static const char *szIdentityShaderFP =
"#version 330\n"
"uniform sampler2D colormap;\n"
"uniform vec4 showFan;"
"in vec2 varyingTexCoords;\n"
"void main(void) \n"
"{"
//"gl_FragColor = showFan;\n"
"gl_FragColor = texture(colormap, varyingTexCoords);\n"
"}\n";
GLuint shaderName = 0;
shaderName = gltLoadShaderPairSrcWithAttributes(szIdentityShaderVP, szIdentityShaderFP, 2, GLT_ATTRIBUTE_VERTEX, "vVertex", GLT_ATTRIBUTE_TEXTURE0, "TexCoords");
if(shaderName == 0) { printf("***shader compile failed****\n");}
glUseProgram(shaderName);
vmathM4MakeOrthographic( &pmatrix, -50.0f, 50.0f, -50.0f, 50.0f, -50.0f, 50.0f);
GLint mvp = 0;
mvp = glGetUniformLocation(shaderName, "mvp");
glUniformMatrix4fv(mvp, 1, GL_FALSE, (GLfloat *) &pmatrix);
GLint texUniform = 0;
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, textureName);
texUniform = glGetUniformLocation(shaderName, "colormap");
glUniform1i(texUniform, 0);
GLint showFan= 0;
showFan = glGetUniformLocation(shaderName, "showFan");
glUniform4f(showFan, 1.0f, 0.0f, 0.0f, 1.0f);
glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
SDL_GL_SwapWindow(gcore.mainwindow);
glDisableVertexAttribArray(GLT_ATTRIBUTE_VERTEX);
glDisableVertexAttribArray(GLT_ATTRIBUTE_TEXTURE0);
glDeleteProgram(shaderName);
glDeleteBuffers(1, &vbo);
glDeleteBuffers(1, &tbo);
glCheckError();
}
void glCheckError(){
GLenum checkError = glGetError();
if(checkError != GL_NO_ERROR)
printf("Error: %i\n", checkError);
}
I use FreeType on an iPhone device; it works when creating the texture with:
glTexImage2D( GL_TEXTURE_2D, 0, GL_LUMINANCE_ALPHA, width, height, 0, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, data);
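Applied to the loading code in the question, that would mean making the internal format match the pixel data format, roughly like this (a guess at the single changed line, not tested against the poster's setup):
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE_ALPHA, width, height, 0,
             GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, expanded_data);
Note that GL_LUMINANCE_ALPHA only exists in OpenGL ES and in the desktop compatibility profile; in a 3.x core context the usual replacement is a GL_RG8 texture combined with a texture swizzle (or sampling the .r and .g channels explicitly in the shader).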