I've been following the open.gl tutorials without using the GLM library, because reasons (stubbornness and C).
I can't get the view and projection matrices to work properly.
Here's the relevant vertex shader code,
#version 150 core
in vec3 size;
in vec3 color;
in vec2 texcoord;
out vec3 Color;
out vec2 Texcoord;
uniform vec3 pos;
uniform float angle;
uniform vec3 camPos;
uniform vec3 camTarget;
const float fov=90, ratio=4.0/3.0, near=1.0, far=10.0;
mat4 projection ()
{
float t = tan(radians(fov)),
l = ratio * t;
return mat4(
vec4(near/l, 0.0, 0.0, 0.0),
vec4(0.0, near/t, 0.0, 0.0),
vec4(0.0, 0.0, -(far+near)/(far-near), -(2*far*near)/(far-near)),
vec4(0.0, 0.0, -1.0, 0.0)
);
}
mat4 rotZ(float theta)
{
return mat4(
vec4(cos(theta), -sin(theta), 0.0, 0.0),
vec4(sin(theta), cos(theta), 0.0, 0.0),
vec4(0.0, 0.0, 1.0, 0.0),
vec4(0.0, 0.0, 0.0, 1.0)
);
}
mat4 translate(vec3 translation)
{
return mat4(
vec4(1.0, 0.0, 0.0, 0.0),
vec4(0.0, 1.0, 0.0, 0.0),
vec4(0.0, 0.0, 1.0, 0.0),
vec4(translation.x, translation.y, translation.z, 1.0)
);
}
mat4 lookAtRH(vec3 eye, vec3 target)
{
vec3 zaxis = normalize(target - eye); // The "forward" vector.
vec3 xaxis = normalize(cross(vec3(0.0,0.0,1.0), zaxis));// The "right" vector.
vec3 yaxis = normalize(cross(zaxis, xaxis)); // The "up" vector.
mat4 axis = {
vec4(xaxis.x, yaxis.x, zaxis.x, 0),
vec4(xaxis.y, yaxis.y, zaxis.y, 0),
vec4(xaxis.z, yaxis.z, zaxis.z, 0),
vec4(dot(xaxis,-eye), dot(yaxis,-eye), dot(zaxis,-eye), 1)
};
return axis;
}
void main()
{
Color = color;
Texcoord = texcoord;
mat4 model = translate(pos) * rotZ(angle);
mat4 view = lookAtRH(camPos, camTarget);
gl_Position = projection() * view * model * vec4(size, 1.0);
}
From tweaking things around, it seems the view matrix is correct and the projection matrix is causing the dodginess.
First I must remark that it is a very bad idea to compute these matrices directly in the shaders.
However, if you really want to, you can. You should be aware that the GLSL matrix constructors take column vectors, so your projection matrix is specified transposed (your translation matrix, however, is correct).
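For illustration, here is a sketch of the same projection() with the constructor arguments written as columns. Note that it also uses the tangent of half the field of view, which the standard perspective formula calls for and which the tan(radians(fov)) above omits:
mat4 projection()
{
    float t = tan(radians(fov) / 2.0), // tangent of the half-angle
          l = ratio * t;
    return mat4(
        vec4(near/l, 0.0, 0.0, 0.0),                    // column 0
        vec4(0.0, near/t, 0.0, 0.0),                    // column 1
        vec4(0.0, 0.0, -(far+near)/(far-near), -1.0),   // column 2
        vec4(0.0, 0.0, -(2.0*far*near)/(far-near), 0.0) // column 3
    );
}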
EDIT: If you want pure C, here is nice lib for math (+ you can check the code :) ) https://github.com/datenwolf/linmath.h
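For example, a minimal sketch using that header's mat4x4_* helpers (camPos and camTarget stand in for the values you currently pass as uniforms, and the "mvp" uniform name and program variable are assumptions):
#include "linmath.h"
/* Once per frame on the CPU, instead of once per vertex on the GPU: */
mat4x4 model, view, proj, tmp, mvp;
vec3 up = {0.0f, 0.0f, 1.0f};                    /* z-up, matching lookAtRH above */
mat4x4_translate(model, pos[0], pos[1], pos[2]); /* translate(pos) */
mat4x4_rotate_Z(model, model, angle);            /* rotZ(angle) */
mat4x4_look_at(view, camPos, camTarget, up);
mat4x4_perspective(proj, 1.5708f /* 90 degrees in radians */, 4.0f/3.0f, 1.0f, 10.0f);
mat4x4_mul(tmp, view, model);
mat4x4_mul(mvp, proj, tmp);
glUniformMatrix4fv(glGetUniformLocation(program, "mvp"), 1, GL_FALSE, (const GLfloat*)mvp);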
Never do something like that :) Creating matrices in a shader is a very bad idea...
The vertex shader is executed once for each vertex, so if you pass a thousand vertices to the shader, you recalculate the same matrices a thousand times. I think there's nothing more to explain :)
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
... // somewhere
glm::mat4 projection = glm::perspective(45.0f, float(window_width) / window_height, 0.1f, 100.0f);
glm::mat4 model(1.0f); // world/model matrix
glm::mat4 view(1.0f); // view/camera matrix
glm::mat4 mvp = projection * view * model;
... // inside the main loop
glUniformMatrix4fv(glGetUniformLocation(program, "mvpMatrix"), 1, GL_FALSE, &mvp[0][0]);
draw_mesh();
It's really cool and optimised :)
Related
I have a program that simply draws a cube. When applying transformations such as rotation or scaling, the program works. When I attempt to apply any perspective matrix, such as perspective, frustum, or ortho, the cube becomes very distorted in undefined ways. What I am getting confused about is why the program works fine with the other transformations but breaks when applying any sort of perspective view.
Additionally, when I change the GL_TRUE parameter to GL_FALSE in glUniformMatrix4fv, the cube stops moving around the screen but still has strange distortion. Here is the display function; just applying perspective gives that same distortion.
void display()
{
vec4 e2 = { 0.0, 0.0, 1.0, 0.0};
vec4 at = { 0.0, 0.0, 0.0, 0.0 };
vec4 up = { 0.0, 1.0, 0.0, 0.0 };
mat4 rx = RotateX(theta);
mat4 ry = RotateY(theta);
mat4 ps = Perspective(zoom*45.0, aspect, 0.5, 10.0);
mat4 rxry = multiplymat4(rx, ry);
mat4 mv = LookAt(e2, at, up);
glUniformMatrix4fv( ModelView, 1, GL_TRUE, &mv.m[0][0] );
mat4 p = multiplymat4(rxry,ps);
glUniformMatrix4fv(projection, 1, GL_TRUE, &p.m[0][0]);
}
I do not believe the problem is in my perspective function, since ortho and frustum do the same thing, but the perspective code is below.
mat4 Perspective(float fovy, float aspect, float zNear, float zFar)
{
float top = tan(fovy*DegreesToRadians/2) * zNear;
float right = top * aspect;
mat4 c = ZERO_MATRIX;
c.m[0][0] = zNear/right;
c.m[1][1] = zNear/top;
c.m[2][2] = -(zFar + zNear)/(zFar - zNear);
c.m[2][3] = -2.0*zFar*zNear/(zFar - zNear);
c.m[3][2] = -1.0;
c.m[3][3] = 0.0;
return c;
}
And the vertex shader
#version 120
attribute vec4 vPosition;
attribute vec4 vColor;
varying vec4 color;
uniform mat4 ModelView;
uniform mat4 projection;
void
main()
{
gl_Position = projection*ModelView*vPosition/vPosition.w;
color = vec4( vColor);
}
I can spot two places that look wrong to me.
1) You are multiplying your perspective matrix with a rotation matrix. Why? If you want to do camera movements, they must be done in the lookAt matrix. So, I suggest this simple code:
mat4 ps = Perspective(zoom*45.0, aspect, 0.5, 10.0);
glUniformMatrix4fv(projection, 1, GL_TRUE, &ps.m[0][0]);
2) The perspective divide is done automatically by the GPU, so it seems to me your vertex shader is wrong too:
gl_Position = projection*ModelView*vPosition;
color = vec4( vColor);
Matrix multiplication order matters, and from what I see your p matrix should be
mat4 p = multiplymat4(ps, rxry);
Though logically rotation belongs to the view matrix.
Also, / vPosition.w likely does nothing in the shader, since w equals 1.0 unless you actually supplied 4-dimensional position data; in any case, you don't need a perspective divide in your vertex shader.
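Putting the two answers together, here is a sketch of display() using the question's own helpers, with the rotation folded into the modelview and the projection uploaded on its own:
void display()
{
    vec4 e2 = { 0.0, 0.0, 1.0, 0.0 };
    vec4 at = { 0.0, 0.0, 0.0, 0.0 };
    vec4 up = { 0.0, 1.0, 0.0, 0.0 };
    mat4 rxry = multiplymat4(RotateX(theta), RotateY(theta));
    mat4 mv = multiplymat4(LookAt(e2, at, up), rxry); /* rotation joins the modelview */
    glUniformMatrix4fv(ModelView, 1, GL_TRUE, &mv.m[0][0]);
    mat4 ps = Perspective(zoom*45.0, aspect, 0.5, 10.0); /* projection stays alone */
    glUniformMatrix4fv(projection, 1, GL_TRUE, &ps.m[0][0]);
}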
I was trying out a shader example to draw a triangle with the RGB interpolated across the vertices, and assumed that using
layout (location = 0)in vec4 vertex;
layout (location = 1) in vec4 VertexColor;
in the vertex shader would work, since the 4 float colors immediately follow the 4 float vertices in the array. However, it always drew a solid red triangle. So I tried location = 4, only to get a black screen. Experimenting further gave a blue triangle for location = 2, and I finally got the interpolated result with location = 3.
My question is, how was I expected to know to enter 3 as the location? The vertex array looks like this:
GLfloat vertices[] = { -1.5,-1.5, 0.0, 1.0, //first 3D vertex
1.0, 0.0, 0.0, 1.0, //first color
0.0, 1.5, 0.0, 1.0, //second vertex
0.0, 1.0, 0.0, 1.0, //second color
1.5,-1.5, 0.0, 1.0, //third vertex
0.0, 0.0, 1.0, 1.0,}; //third color
note: edited the first code block from the original layout = 3 to layout = 1
Each location can hold 4 floats (a single vec4), so a valid option would also be:
layout (location = 0)in vec4 vertex;
layout (location = 1) in vec4 VertexColor;
What dictates where each attribute comes from is the set of glVertexAttribPointer calls.
These are the ones I would expect for the GLSL declaration above (assuming you use a VBO):
glVertexAttribPointer(0, 4, GL_FLOAT, GL_FALSE, sizeof(float)*4*2, (void*)0);
glVertexAttribPointer(1, 4, GL_FLOAT, GL_FALSE, sizeof(float)*4*2, (void*)(sizeof(float)*4));
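For completeness, a sketch of the surrounding setup under the same assumptions, uploading the question's vertices array into a VBO (the buffer name vbo is made up) and enabling both attributes:
GLuint vbo;
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
GLsizei stride = sizeof(GLfloat) * 4 * 2; /* one vertex = vec4 position + vec4 color */
glVertexAttribPointer(0, 4, GL_FLOAT, GL_FALSE, stride, (void*)0);
glVertexAttribPointer(1, 4, GL_FLOAT, GL_FALSE, stride, (void*)(sizeof(GLfloat) * 4));
glEnableVertexAttribArray(0);
glEnableVertexAttribArray(1);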
I wrote a little program to display a 32-bit float texture on a simple quad. When displaying the quad, the texture color is always black. I experimented with a lot of things, but I couldn't make it work, and I'm really at a loss as to what the problem is.
The code of creating the OpenGL texture goes like this
glEnable(GL_TEXTURE_2D);
glGenTextures(1, &textureID);
glBindTexture(GL_TEXTURE_2D, textureID);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA32F, width, height, 0, GL_RGBA, GL_FLOAT, textureData);
Using the debugger, there's no error in any of these calls. I also examined the textureData pointer, and got the expected results (in my simplified program, it is just a gradient texture).
This is the vertex shader code in GLSL:
#version 400
in vec4 vertexPosition;
out vec2 uv;
void main() {
gl_Position = vertexPosition;
uv.x = (vertexPosition.x + 1.0) / 2;
uv.y = (vertexPosition.y + 1.0) / 2;
}
It's kind of a simple generation of the UV coordinates without taking them as vertex attributes. The corresponding vertex buffer object is really simple:
GLfloat vertices[4][4] = {
{ -1.0, 1.0, 0.0, 1.0 },
{ -1.0, -1.0, 0.0, 1.0 },
{ 1.0, 1.0, 0.0, 1.0 },
{ 1.0, -1.0, 0.0, 1.0 },
};
I've tested the solution, and it displays the quad covering the entire window as I wanted. Displaying the UV coordinates in the fragment shader reproduces the gradient I expected to get. Now here's the fragment shader:
#version 400
uniform sampler2D myTex;
in vec2 uv;
out vec4 fragColor;
void main() {
fragColor = texture(myTex, uv);
// fragColor += vec4(uv.x, uv.y, 0, 1);
}
The commented-out line displays the UV coordinates as color for debugging purposes. What am I doing wrong here? I just can't see why the texture() call returns 0 when the texture seems completely right and the UV coordinates are also proper. I link the full code here in case there's something else I'm doing wrong: gl-view.c
EDIT: This is how I set up the myTex sampler:
glEnable(GL_TEXTURE_2D);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, textureID);
glUniform1i(glGetUniformLocation(shaderProgram, "myTex"), 0);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
EDIT: Cleared up the vertex shader code.
I've found the issue: I didn't set any MAG or MIN filter on the texture. Setting the MIN filter to GL_NEAREST solved the problem. The default MIN filter is GL_NEAREST_MIPMAP_LINEAR, which requires a complete mipmap chain; since I never generated mipmaps, the texture was incomplete and sampling it returned black.
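In other words, a sketch like this right after the glTexImage2D call is enough:
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST); /* works without mipmaps */
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);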
First of all, I defined a structure to express the coordinates of a pyramid:
typedef struct
{
GLfloat xUp;
GLfloat yUp;
GLfloat zUp;
GLfloat base;
GLfloat height;
}pyramid;
Pretty self-explanatory: I store the coordinates of the apex, the base and the height.
Then I wrote a function to draw a pyramid:
void drawPyramid(pyramid pyr)
{
GLfloat p1[]= {pyr.xUp+pyr.base/2.0, pyr.yUp-pyr.height, pyr.zUp-pyr.base/2.0};
GLfloat p2[]= {pyr.xUp+pyr.base/2.0, pyr.yUp-pyr.height, pyr.zUp+pyr.base/2.0};
GLfloat p3[]= {pyr.xUp-pyr.base/2.0, pyr.yUp-pyr.height, pyr.zUp+pyr.base/2.0};
GLfloat p4[]= {pyr.xUp-pyr.base/2.0, pyr.yUp-pyr.height, pyr.zUp-pyr.base/2.0};
GLfloat up[]= {pyr.xUp, pyr.yUp, pyr.zUp};
glBegin(GL_TRIANGLES);
glColor4f(1.0, 0.0, 0.0, 0.0);
glVertex3fv(up);
glVertex3fv(p1);
glVertex3fv(p2);
glColor4f(0.0, 1.0, 0.0, 0.0);
glVertex3fv(up);
glVertex3fv(p2);
glVertex3fv(p3);
glColor4f(0.0, 0.0, 1.0, 0.0);
glVertex3fv(up);
glVertex3fv(p3);
glVertex3fv(p4);
glColor4f(1.0, 1.0, 0.0, 0.0);
glVertex3fv(up);
glVertex3fv(p4);
glVertex3fv(p1);
glEnd();
glColor4f(0.0, 1.0, 1.0, 0.0);
glBegin(GL_QUADS);
glVertex3fv(p1);
glVertex3fv(p2);
glVertex3fv(p3);
glVertex3fv(p4);
glEnd();
}
I struggled to draw all the vertices in anti-clockwise order, but I probably messed something up.
This is how I display the pyramid in my rendering function:
void display()
{
glClearColor(0.0, 0.0, 0.0, 0.0);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glTranslatef(0.0, -25.0, 50.0);
glRotatef(-angle, 0.0, 1.0, 0.0);
glTranslatef(0.0, 25.0, -50.0);
pyramid pyr;
pyr.xUp=0.0;
pyr.yUp=10.0;
pyr.zUp=50.0;
pyr.base=10.0;
pyr.height=18.0;
glColor4f(1.0, 0.0, 0.0, 0.0);
drawPyramid(pyr);
glutSwapBuffers();
}
I also use an init method called before the glut main loop:
void init()
{
glEnable(GL_DEPTH);
glViewport(-1.0, 1.0, -1.0, 1.0);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
gluPerspective(35.0, 1.0, 1.0, 100.0);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
gluLookAt(0.0,1.0,0.0, 0.0,1.0,30.0, 0.0,1.0,0.0);
}
angle is just a double that I use to rotate the pyramid, changeable by pressing 'r', but this is not relevant. It appears that the real problem is how I draw the vertices.
The problem is that the faces of the pyramid appear scattered and messed up. I would better describe this situation with an image:
There's a face that is too small that gets displayed, and I don't know why.
If I rotate the pyramid it appears messed up; I even recorded a video to describe this.
I could upload it later if the problem is not totally clear.
PS: Many people have noticed that I am using outdated techniques. But unfortunately this is what my university offers.
EDIT
I forgot to mention the main function:
int main(int argc, char** argv)
{
glutInit(&argc, argv);
glutInitWindowPosition(100, 100);
glutInitWindowSize(500, 500);
glutInitDisplayMode(GLUT_RGB | GLUT_DOUBLE | GLUT_DEPTH);
glutCreateWindow("Sierpinsky Pyramid");
glutDisplayFunc(display);
glutKeyboardFunc(keyboard);
init();
glutMainLoop();
return 0;
}
It looks like the depth buffer isn't initialized.
Calling glEnable(GL_DEPTH_TEST) is not enough. You must correctly initialize GLUT and specify that you want depth buffer support, otherwise you won't get a depth buffer. If I remember correctly, this is done using glutInitDisplayMode(GLUT_DEPTH|...); see the glutInitDisplayMode documentation and an introductory GLUT tutorial. Additional info can be found using Google.
--EDIT--
You're passing an invalid parameter to glEnable. Call glEnable(GL_DEPTH_TEST) instead of glEnable(GL_DEPTH).
Also:
The matrix code in the display function isn't protected by glPushMatrix/glPopMatrix, which means the rotation is applied on top of the previous transform every frame. I.e. every call to the display function rotates the pyramid a bit further.
glViewport is called with invalid parameters. glViewport takes 4 integer arguments (x, y, width, height), but you're trying to pass floats. Also, what is a width of -1.0 supposed to mean?
You have not checked any error codes (glGetError). If you had called glGetError after the glEnable call, you'd have seen that it returns GL_INVALID_ENUM.
OpenGL has documentation, available on opengl.org. Use it and read it. I'd also recommend reading the "OpenGL Red Book".
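A sketch of init() and display() with those fixes applied (the 500x500 viewport comes from the question's main(), which already requests GLUT_DEPTH):
void init()
{
    glEnable(GL_DEPTH_TEST); /* not GL_DEPTH, which is an invalid enum here */
    glViewport(0, 0, 500, 500); /* integer pixel rectangle: x, y, width, height */
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    gluPerspective(35.0, 1.0, 1.0, 100.0);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    gluLookAt(0.0,1.0,0.0, 0.0,1.0,30.0, 0.0,1.0,0.0);
}
void display()
{
    glClearColor(0.0, 0.0, 0.0, 0.0);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glPushMatrix(); /* keep the rotation from accumulating across frames */
    glTranslatef(0.0, -25.0, 50.0);
    glRotatef(-angle, 0.0, 1.0, 0.0);
    glTranslatef(0.0, 25.0, -50.0);
    pyramid pyr = { 0.0, 10.0, 50.0, 10.0, 18.0 }; /* xUp, yUp, zUp, base, height */
    drawPyramid(pyr);
    glPopMatrix();
    glutSwapBuffers();
}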
I have a 256x256 texture, and texturing in general works like it should, but when I try to map the whole texture onto a whole quad it comes out ruined.
The quad is textured only across 1/5 of its width from left to right; the rest is black, which means no texturing there.
glFrontFace is untouched (default).
Texture created with:
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
GLfloat quad_pos[] =
{
0.5, 0.5, 0.0,
-0.5, 0.5, 0.0,
0.5,-0.5, 0.0,
-0.5,-0.5, 0.0
};
//Maybe I need tex coords for vertices 5 and 6?
GLfloat quad_tex[]=
{
0.0, 1.0,
0.0, 0.0,
1.0, 1.0,
1.0, 0.0
};
GLfloat quad_col[]=
{
0.0, 1.0, 1.0, 1.0,
1.0, 0.0, 1.0, 1.0,
1.0, 1.0, 0.0, 1.0,
0.0, 0.0, 1.0, 1.0
};
//In the draw_quad() method
//Binding texture and setting uniforms/attributes skipped
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
Vertex shader
attribute vec4 av_col;
attribute vec4 av_pos;
attribute vec2 av_tex;
varying vec4 vv_col;
varying vec2 vv_tex;
uniform mat4 um_mvp;
void main()
{
vv_col = av_col;
vv_tex = av_tex;
gl_Position = um_mvp * av_pos;
}
Fragment shader
precision lowp float;
uniform sampler2D us_tex;
varying vec4 vv_col;
varying vec2 vv_tex;
void main()
{
gl_FragColor = (vv_col * texture2D(us_tex, vv_tex));
}
If I change the tex coords to
GLfloat quad_tex[]=
{
0.0, 0.2,
0.0, 0.0,
0.2, 0.2,
0.2, 0.0
};
Quad will be fully textured and colored, but texture will be overscaled (minecraft pixel style).
"Quad will be fully textured and colored, but texture will be overscaled (minecraft pixel style)"
It sounds as though you have default GL_NEAREST filtering enabled. Try using GL_LINEAR instead:
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
http://www.khronos.org/opengles/sdk/docs/man/xhtml/glTexParameter.xml