I am relatively new to openGL and 3D drawing.
I drew a little person in Blender and exported its vertices to a .obj file so I can render that person using my C code.
this is the person I drew:
The problem
When I import it in my code I don't see a person any more but a horrible ugly mess:
What I think
In the beginning, when I tested my software, I drew a cube and imported those vertices into my code. I saw that I only had 6 v-vertices. This gave me a partial cube (hence one side was not closed).
So maybe it has something to do with that.
What can I do to solve this issue? Where did I make a mistake? Does it have something to do with the projection or with the export from Blender?
This is my C code
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <math.h>
#include <GL/glut.h>
#include "myVertices.h"
/* Camera eye position and the z component of the camera's up vector
 * (a crude "barrel roll" tilt), consumed by gluLookAt in drawScene.
 * NOTE(review): zCamera is never referenced in the visible code — the
 * gluLookAt call hard-codes the eye z as 3.0; confirm intent. */
float xCamera = 3.0;
float yCamera = 3.0;
float zCamera = 0.0;
float barelRollCamera = 0.0;
/* One-time GL state: depth testing on, white clear colour.
 * (The two calls are order-independent.) */
void init(void)
{
    glEnable(GL_DEPTH_TEST);
    glClearColor(1.0, 1.0, 1.0, 1.0);
}
/*
 * Submit a vertex list as one primitive.
 *
 * vertex       - array of XYZ coordinates
 * numberVertex - number of entries in vertex
 * shape        - 0 draws GL_LINES, 1 draws GL_POLYGON; any other value
 *                is now rejected.
 *
 * Bug fixed: the original default case fell through to glEnd() with no
 * matching glBegin(), which raises GL_INVALID_OPERATION.  Unknown shape
 * codes now return before the glBegin/glEnd pair.
 */
void drawVertex(GLfloat vertex[][3], int numberVertex, int shape)
{
    int i;

    glPushMatrix();
    switch (shape) {
    case 0:
        glBegin(GL_LINES);
        break;
    case 1:
        glBegin(GL_POLYGON);
        break;
    default:
        /* Unknown primitive selector: bail out cleanly. */
        glPopMatrix();
        return;
    }
    for (i = 0; i < numberVertex; i++) {
        glVertex3fv(vertex[i]);
    }
    glEnd();
    glPopMatrix();
}
/* Display callback: position the camera, then draw the axes and the
 * imported Blender model. */
void drawScene(void)
{
glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
/* Eye at (xCamera, yCamera, 3.0) looking at the origin; the up
 * vector's z component is the "barrel roll" tilt. */
gluLookAt(xCamera,yCamera,3.0, 0.0,0.0,0.0, 0.0,1.0,barelRollCamera);
glColor3f(0.0, 0.0, 0.0);
//draws axis
drawVertex(assen,6,0);
//draws person I drew in blender using the vertices in my header file
/* NOTE(review): all 1038 vertices are submitted as ONE GL_POLYGON.
 * An OBJ export is many faces; each face must be drawn as its own
 * primitive (e.g. GL_TRIANGLES using the OBJ "f" indices).  Sending
 * the whole vertex list as a single polygon is the likely cause of
 * the "ugly mess" — confirm against the exporter's face data. */
drawVertex(person,1038,1);
glFlush();
}
void herschaal(){
glViewport(0,0,500,500);
glLoadIdentity();
glMatrixMode(GL_PROJECTION);
glOrtho(-6.500, 6.500, -6.500, 6.500, -6.00, 12.00);
}
/* Entry point: boot GLUT, create the window, register the callbacks
 * and hand control to the event loop (glutMainLoop never returns). */
int main(int argc, char *argv[])
{
    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB | GLUT_DEPTH);
    /* Window geometry: size and initial position are independent. */
    glutInitWindowSize(1000, 1000);
    glutInitWindowPosition(50, 100);
    glutCreateWindow("test");
    glutReshapeFunc(herschaal);
    init();
    glutDisplayFunc(drawScene);
    glutMainLoop();
    return 0;
}
the original export from blender: http://hastebin.com/owubizotuv.hs
the file with v-vertices I use, aka myVertices.h: http://hastebin.com/lirajuhiqe.avrasm
Related
#include <GL/glut.h>
#include <stdio.h>
#include <math.h>
/* Line endpoints read from stdin.
 * NOTE(review): the names y0 and yn collide with the Bessel functions
 * y0()/yn() declared by <math.h> — that is exactly the "redeclared as
 * different kind of symbol" error reported below.  Rename them (e.g.
 * ys/ye) or drop the unused #include <math.h>. */
int x0,y0,xn,yn;
/*
 * Rasterise the line (x0,y0)-(xn,yn) with Bresenham's algorithm.
 *
 * Bug fixed: the original computed the slope with INTEGER division
 * (m = dy/dx), so for any true slope strictly between 0 and 1 the
 * result truncated to 0 and the guard "m > 0 && m < 1" was never
 * satisfied — nothing was ever drawn.  The slope range is now tested
 * directly on dy and dx without dividing.
 *
 * Scope is unchanged: only slopes strictly in (0,1) with xn > x0 are
 * handled, exactly as before.
 */
void bresenham(void)
{
    int dx, dy, pk, xk, yk, k;

    glClear(GL_COLOR_BUFFER_BIT);
    glColor3f(0, 0, 0);
    setPixel(x0, y0);

    dx = xn - x0;
    dy = yn - y0;
    pk = 2 * dy - dx;   /* initial decision parameter */
    xk = x0;
    yk = y0;

    /* 0 < dy/dx < 1  <=>  dx > 0 && 0 < dy < dx (no division needed) */
    if (dx > 0 && dy > 0 && dy < dx) {
        for (k = 0; k < dx - 1; k++) {
            if (pk < 0) {
                xk++;
                pk += 2 * dy;
            } else {
                xk++;
                yk++;
                pk += 2 * dy - 2 * dx;
            }
            setPixel(xk, yk);
        }
    }
    glFlush();
}
/* Read the line endpoints from stdin, then run the GLUT pipeline.
 *
 * Fixes relative to the original:
 *  - init() is forward-declared: it is defined AFTER main() in this
 *    file, and calling it through an implicit declaration is an error
 *    in modern C.
 *  - scanf return values are checked so the program no longer proceeds
 *    with uninitialised endpoints on malformed input.
 */
int main(int argc, char **argv)
{
    void init(void);   /* defined below main() in this file */

    printf("enter starting points");
    if (scanf("%d", &x0) != 1 || scanf("%d", &y0) != 1) {
        fprintf(stderr, "invalid starting point\n");
        return 1;
    }
    printf("enter endpoints");
    if (scanf("%d", &xn) != 1 || scanf("%d", &yn) != 1) {
        fprintf(stderr, "invalid endpoint\n");
        return 1;
    }

    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB);
    glutInitWindowPosition(50, 25);
    glutInitWindowSize(500, 250);
    glutCreateWindow("Bresenham Line");
    init();
    glutDisplayFunc(bresenham);
    glutMainLoop();
    return 0;
}
/* One-time GL setup: white clear colour and a 400x400 orthographic 2D
 * projection.  glMatrixMode must precede gluOrtho2D so the ortho
 * matrix lands on the projection stack. */
void init(void)
{
glClearColor(1.0,1.0,1.0,0.0);
glMatrixMode(GL_PROJECTION);
gluOrtho2D(0.0,400.0,0.0,400.0);
}
/*
 * Plot one point at (x, y) using the caller's CURRENT colour.
 *
 * Bug fixed: the original forced glColor3f(255,255,255) here, which
 * clamps to pure white — invisible against the white clear colour set
 * in init(), and it clobbered the black colour bresenham() had just
 * selected.  The colour override is removed so the caller's colour
 * applies.
 */
void setPixel(GLint x, GLint y)
{
    glBegin(GL_POINTS);
    glVertex2f(x, y);
    glEnd();
}
the error is:
4 8 C:\Users\Hewlett\Documents\bresenham1.c [Error] 'y0' redeclared as
different kind of symbol
,4 14 C:\Users\Hewlett\Documents\bresenham1.c [Error] 'yn' redeclared
as different kind of symbol .
Can anyone tell me why it says that y0 and yn are redeclared as a different kind of symbol?
why it is showing y0 &yn are redeclared as different kind of symbol
Because you are including <math.h>, which already declares functions with those names:
The y0(), y1(), and yn() functions shall compute Bessel functions of x of the second kind of orders 0, 1, and n, respectively.
Switch to different names or don't #include <math.h>.
I'm trying to create a shader for a screen-filling quad, but I can't seem to pass a uniform float to the shader.
In the following example I'm initializing GLUT, creating/compiling & linking the shaders, passing a uniform int & float to the shader, and reading them back to check. The int asdf works fine; the float qwer is behaving weirdly.
If i set the value of qwer to 1.3, the uniform is set to -2.0, if i set it to 1.2, the uniform is set to 2.0.
#include <stdio.h>
#include <string.h>
#include <GL/glut.h>
#include <unistd.h>
/* Initial window dimensions in pixels, passed to glutInitWindowSize. */
int gw = 640, gh = 360;
void drawScene(){
//creating a screen filling quad
glBegin(GL_QUADS);
glTexCoord2f(0.0, 0.0f); glVertex2i(-1, 1);
glTexCoord2f(1.0, 0.0f); glVertex2i(1, 1);
glTexCoord2f(1.0, 1.0f); glVertex2i(1, -1);
glTexCoord2f(0.0, 1.0f); glVertex2i(-1, -1);
glEnd();
glutSwapBuffers();
}
/* ~30 Hz timer callback: request a repaint, then rearm itself. */
void update(int value){
    (void)value;   /* timer payload unused */
    glutPostRedisplay();
    glutTimerFunc(1000 / 30, update, 0);
}
int main(int argc, char** argv){
//shader source code
char *fraShdrStr = "\n\
uniform int asdf;\
uniform float qwer;\
void main(){\n\
vec2 p = gl_TexCoord[0].xy;\n\
gl_FragColor=vec4(p.x,qwer,float(asdf),1.0);\n\
}";
char *verShdrStr = "\n\
void main(){\n\
gl_Position=gl_ModelViewProjectionMatrix*gl_Vertex;\n\
gl_TexCoord[0]=gl_MultiTexCoord0;\n\
}";
size_t verShdrLen, fraShdrLen;
char errorBuffer[1024];
int errorLength;
int program, verShdr, fraShdr;
verShdrLen = strlen(verShdrStr);
fraShdrLen = strlen(fraShdrStr);
//initializing glut
glutInit(&argc, argv);
glutInitWindowSize(gw, gh);
glutCreateWindow("");
//creating, compiling and linking shaders
verShdr = glCreateShader(GL_VERTEX_SHADER);
fraShdr = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(verShdr, 1, &verShdrStr, &verShdrLen);
glShaderSource(fraShdr, 1, &fraShdrStr, &fraShdrLen);
glCompileShader(verShdr);
glGetShaderInfoLog(verShdr, 1024, &errorLength, errorBuffer);
if(errorLength) printf("Vertex Shader Error:\n%s\n", errorBuffer);
glCompileShader(fraShdr);
glGetShaderInfoLog(fraShdr, 1024, &errorLength, errorBuffer);
if(errorLength) printf("Fragmen Shader Error:\n%s\n", errorBuffer);
program = glCreateProgram();
glAttachShader(program, verShdr);
glAttachShader(program, fraShdr);
glLinkProgram(program);
glGetProgramInfoLog(program, 1024, &errorLength, errorBuffer);
if(errorLength) printf("Linking Error:\n%s\n", errorBuffer);
glUseProgram(program);
//initializing variables to pass as uniform
int asdf = 9;
int asdf2;
float qwer = 1.0;
float qwer2;
//setting the uniform values
glUniform1i(glGetUniformLocation(program, "asdf"), asdf);
glGetUniformiv(program, glGetUniformLocation(program, "asdf"), &asdf2);
printf("%d\n", asdf2);
glUniform1f(glGetUniformLocation(program, "qwer"), qwer);
glGetUniformfv(program, glGetUniformLocation(program, "qwer"), &qwer2);
printf("%f\n", qwer2);
glutDisplayFunc(drawScene);
glutTimerFunc(1000/30, update, 0);
glutMainLoop();
}
You are misunderstanding the whole picture. Since OpenGL 3.0 using glBegin/glEnd in order to draw stuff is deprecated. Instead you should use an approach, based on using so-called Vertex Arrays. Check out this question to get an example code snippet.
//Program to implement Basic Incremental Algorithm
//Working on ubuntu
#include <GL/glut.h>
#include<stdlib.h>
#include <stdio.h>
/* NOTE(review): x0, y0, x1, y1 collide with the glibc Bessel helpers
 * y0()/y1() whenever <math.h> is visible (GL headers may include it
 * transitively) — rename if "redeclared" errors appear. */
GLfloat x0,x1,y0,y1; //Input variables taken as global
int flag=1; //variable for display1(): draw the line only on the first redisplay
/* One-time GL setup: black clear colour and a 500x500 orthographic 2D
 * projection (matrix mode must be GL_PROJECTION before gluOrtho2D). */
void init(void)
{
glClearColor(0.0,0.0,0.0,0.0);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
gluOrtho2D(0.0,500.0,0.0,500.0);
}
/* Plot one point at (x, y) with the current colour, flushing so it
 * appears immediately in the single-buffered window.  (Flushing per
 * pixel is slow; it is what makes the line draw visibly.) */
void PutPixel(GLfloat x,GLfloat y)
{
glBegin(GL_POINTS);
glVertex2f(x,y); //To display pixels on-screen
glEnd();
glFlush();
}
void display1(void)
{
if (flag==1)
{
glClear(GL_COLOR_BUFFER_BIT);
glColor3f(0.7,1.0,1.0);
GLfloat m,c,y;
GLfloat i; //BIA algorithm
m=(y1-y0)/((float)(x1-x0));
c=y1-m*x1;
for (i=x0; i<=x1; i+=0.01)
{
y=c+m*i;
PutPixel(i,y);
}
flag++;
}
}
/* Read the two line endpoints from stdin.  Exits on malformed input
 * instead of continuing with uninitialised coordinates (the original
 * ignored the scanf return values). */
void Input()
{
    printf("Enter the co-ods\n");
    if (scanf("%f %f", &x0, &y0) != 2 || scanf("%f %f", &x1, &y1) != 2) {
        fprintf(stderr, "Invalid coordinates\n");
        exit(EXIT_FAILURE);
    }
}
/* Entry point: gather the endpoints first (console I/O), then run the
 * standard GLUT boot sequence and enter the event loop. */
int main(int argc, char **argv)
{
    Input();
    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_SINGLE | GLUT_RGBA);
    /* Size and position are independent of each other. */
    glutInitWindowSize(500, 500);
    glutInitWindowPosition(100, 100);
    glutCreateWindow("BIA");
    init();
    glutDisplayFunc(display1);
    glutMainLoop();
    return 0;
}
I have initialized flag as a global variable in the beginning and set it to 1. flag is used in display1() to ensure that it executes only once. This is just one way I was trying to ensure that the output gets displayed.
Can anyone, please HELP!
Why doesn't the program stop taking input ?
It's working. I'm still unsure about which edit or change brought it about, but it's working — it's displaying some output.
The following OpenGL code apparently passes the display function itself as an argument to glutDisplayFunc. In reality only a function pointer can be passed as an argument to glutDisplayFunc. I wonder how this program runs successfully?
#include <GL/glut.h>
#include<GL/gl.h>// Header File For The GLUT Library
/* One-time state: flat shading, dark blue clear colour.
 * (The two calls are order-independent.) */
void init(void)
{
    glShadeModel(GL_FLAT);
    glClearColor(0.0, 0.0, 0.4, 0.0);
}
/* Reshape callback: rebuild the projection for the new window area.
 * The viewport call is independent of the matrix operations, so it is
 * placed after them here. */
void reshape(int w, int h)
{
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(-10, 10, -10, 10, -10, 10);
    /* Map the scene into the full available screen area. */
    glViewport(0, 0, (GLsizei)w, (GLsizei)h);
}
/*
 * Display callback: draw a sequence of triangles for i = -10..10.
 *
 * Bug fixed: the original wrapped everything in while(1), so this
 * callback never returned and glutMainLoop() could never service
 * another event — a permanently frozen, unresponsive window.  Each
 * redisplay now runs the loop once and returns.  Note that every
 * iteration clears the colour buffer, so only the final (i == 10)
 * triangle is ever visible; for an actual animation, advance i once
 * per frame from a glutTimerFunc/glutIdleFunc instead.
 */
void display(void)
{
    int i;

    for (i = -10; i <= 10; i++) {
        glClearColor(1.0, 0.0, 0.0, 0.0);
        glClear(GL_COLOR_BUFFER_BIT);
        glColor3f(0.0, 0.0, 1.0);
        glBegin(GL_TRIANGLES);
        glVertex3f(i, 2, 5);
        glVertex3f(6, -i, -5);
        glVertex3f(1, 9, -1);
        glEnd();
        glFlush();
    }
}
/* Entry point: standard GLUT boot sequence; glutMainLoop never
 * returns, so the trailing return is for form only. */
int main(int argc, char *argv[])
{
    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB);
    /* Window geometry: size first, then position (independent calls). */
    glutInitWindowSize(500, 500);
    glutInitWindowPosition(100, 100);
    glutCreateWindow(argv[0]);   /* window title = program name */
    init();
    glutDisplayFunc(display);
    glutReshapeFunc(reshape);
    glutMainLoop();
    return 0;
}
We need not use the & operator to create a reference to a function. We can pass 'display' to 'glutDisplayFunc' directly — something like an array name referring to the address of its first element?
This -glutDisplayFunc(&display) - also should be fine
Passing the name of a function without parentheses as an argument implicitly passes a pointer to the function. It is not possible to pass a function by value in C.
The algorithm goes into recursion. It prints out blue lines on the polygon but at a particular location (X=397,Y=98). The color doesn't get printed at the pixel and then goes into a recursion for the same pixel and its neighbors.
#include<GL/gl.h>
#include<GL/glu.h>
#include<GL/glut.h>
/* One RGB sample, 8 bits per channel — matches the GL_RGB /
 * GL_UNSIGNED_BYTE format used with glReadPixels below. */
typedef struct pixel{ GLubyte red, green, blue; } pixel;
/*
 * 4-connected recursive boundary fill starting at (x, y): recolour the
 * region with `fill` until pixels of colour `boundary` are reached.
 *
 * Fixes relative to the original:
 *  - missing ';' after the second printf (the file did not compile);
 *  - the stop test compared mismatched channels (e.g. c.red against
 *    boundary.blue); it now stops when the pixel matches EITHER the
 *    boundary colour OR the fill colour, channel by channel;
 *  - coordinates are truncated to int for glReadPixels (takes GLint);
 *  - debug printfs removed.
 *
 * NOTE(review): deep recursion can still overflow the stack on large
 * regions; an explicit stack or scanline fill is the robust fix.  The
 * hard x <= 400 clamp from the original is preserved.
 */
void boundaryfill(float x, float y, pixel fill, pixel boundary)
{
    pixel c;
    int ix = (int)x, iy = (int)y;

    glReadPixels(ix, iy, 1, 1, GL_RGB, GL_UNSIGNED_BYTE, &c);

    int at_boundary = (c.red == boundary.red &&
                       c.green == boundary.green &&
                       c.blue == boundary.blue);
    int already_filled = (c.red == fill.red &&
                          c.green == fill.green &&
                          c.blue == fill.blue);

    if (!at_boundary && !already_filled && ix <= 400) {
        glBegin(GL_POINTS);
        glColor3ub(fill.red, fill.green, fill.blue);
        glVertex2f(x, y);
        glEnd();
        glFlush();

        boundaryfill(x + 1, y, fill, boundary);
        boundaryfill(x - 1, y, fill, boundary);
        boundaryfill(x, y + 1, fill, boundary);
        boundaryfill(x, y - 1, fill, boundary);
    }
}
/*
 * Display callback: draw a dark rectangle, then flood-fill it in blue
 * starting from its centre (300, 75).
 *
 * Bug fixed: the original called glEnd() a second time after
 * boundaryfill() with no matching glBegin(), which raises
 * GL_INVALID_OPERATION; the stray call is removed.
 */
void mydisplay()
{
    pixel fill = { 0, 0, 255 };          /* blue fill colour */
    pixel boundary = { 255, 255, 255 };  /* white, matching the clear colour */

    glBegin(GL_POLYGON);
    glColor3ub(10, 10, 10);
    glVertex2f(200, 50);
    glVertex2f(200, 100);
    glVertex2f(400, 100);
    glVertex2f(400, 50);
    glEnd();
    glFlush();   /* rectangle must be in the framebuffer before glReadPixels */

    boundaryfill(300, 75, fill, boundary);
    glFlush();
}
void main(int argc,char **argv)
{
glutInit(&argc,argv);
glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB);
glutInitWindowSize(400,400);
glutInitWindowPosition(540,320);
glutCreateWindow("my first attempt");
glClearColor(1.0f,1.0f,1.0,0.0f);
glClear(GL_COLOR_BUFFER_BIT);
glutDisplayFunc(mydisplay);
gluOrtho2D(0.0,400.0,0.0,400.0);
glutMainLoop();
}
I suspect the problem is that you're overflowing the stack due to infinite recursion. What prevents the program from just calling boundaryfill forever?
Also, you're missing a call to glEnd inside boundaryfill, but I suspect that's not the reason for the crash.