Compilation error with bresenham line algorithm? - c

#include <GL/glut.h>
#include <stdio.h>
#include <math.h>
int x0,y0,xn,yn;
void bresenham(void)
{
int dx,dy,m,pk,xk,yk,k;
glClear(GL_COLOR_BUFFER_BIT);
glColor3f(0,0,0);
setPixel(x0,y0);
dx=xn-x0;
dy=yn-y0;
pk=2*dy-dx;
m=dy/dx;
xk=x0;
yk=y0;
if(m>0 && m<1)
{
for(k=0;k<dx-1;k++)
{
if(pk<0)
{
xk++;
pk=pk+2*dy;
}
else
{
xk++;
yk++;
pk=pk+2*dy-2*dx;
}
setPixel(xk,yk);
}
}
glFlush();
}
int main (int argc,char **argv)
{
printf("enter starting points");
scanf("%d",&x0);
scanf("%d",&y0);
printf("enter endpoints");
scanf("%d",&xn);
scanf("%d",&yn);
glutInit(&argc,argv);
glutInitDisplayMode(GLUT_SINGLE|GLUT_RGB);
glutInitWindowPosition(50,25);
glutInitWindowSize(500,250);
glutCreateWindow("Bresenham Line");
init();
glutDisplayFunc(bresenham);
glutMainLoop();
return 0;
}
void init(void)
{
glClearColor(1.0,1.0,1.0,0.0);
glMatrixMode(GL_PROJECTION);
gluOrtho2D(0.0,400.0,0.0,400.0);
}
void setPixel(GLint x,GLint y)
{
glColor3f(255,255,255);
glBegin(GL_POINTS);
glVertex2f(x,y);
glEnd();
}
the error is:
4 8 C:\Users\Hewlett\Documents\bresenham1.c [Error] 'y0' redeclared as different kind of symbol
4 14 C:\Users\Hewlett\Documents\bresenham1.c [Error] 'yn' redeclared as different kind of symbol
Can anyone tell me why it says y0 and yn are redeclared as a different kind of symbol?

why it is showing y0 & yn are redeclared as a different kind of symbol
Because you #include <math.h>, which already declares functions with those names:
The y0(), y1(), and yn() functions shall compute Bessel functions of x of the second kind of orders 0, 1, and n, respectively.
Switch to different names or don't #include <math.h>.
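A minimal sketch of the renaming fix (the replacement names lx0, ly0, lxn, lyn are my own choice):

#include <math.h>  /* declares double y0(double), y1(double), yn(int, double) */

int lx0, ly0, lxn, lyn;  /* renamed: no longer collides with the Bessel functions */

Use lx0, ly0, lxn, lyn consistently in bresenham() and main(), and the redeclaration errors go away.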

Related

OpenGL glUniform1f not updating vertex shader

All the tutorials seem to indicate that I am doing things correctly: the vertex shader works; however, it fails to pick up any value changes sent from the main program through glUniform1f. I check glGetError after each line; there are no errors. I check glGetShaderiv and glGetShaderInfoLog; there are no issues. I am testing with OpenGL version 2.1 (unknown profile, but I assume the core profile) as reported by SDL.
#if defined(__WINDOWS__) || defined(_WIN32) || defined(_WIN64) || defined(__WIN32__) || defined(__TOS_WIN__)\
|| defined(__CYGWIN__)
/* Compiling for Windows */
#ifndef __WINDOWS__
#define __WINDOWS__
#endif
#include <windows.h>
#endif/* Predefined Windows macros */
#include <SDL2/SDL.h>
#include <GL/GL.h>
#include <stdlib.h>
#include <stdio.h>
#include <error.h>
//return type not verified
void glGenBuffers();
void glBindBuffer();
void glBufferData();
unsigned int glCreateShader();
void glShaderSource();
void glCompileShader();
void glGetShaderiv();
void glGetShaderInfoLog();
unsigned int glCreateProgram();
void glAttachShader();
void glLinkProgram();
void glGetProgramiv();
void glGetProgramInfoLog();
void glVertexAttribPointer();
void glEnableVertexAttribArray();
void glUseProgram();
void glDeleteShader();
void glGenVertexArrays();
void glBindVertexArray();
GLint glGetUniformLocation();
void glUniform1f();
void glDeleteProgram();
void glDeleteBuffers();
int fixSDLconsole() {
FILE *console = freopen("stdout.txt", "a",stdout);
if (console == NULL) {return errno;}
console = freopen("stdout.txt", "a",stderr);
if (console == NULL) {return errno;}
return 0;
}
void printGLVersionNumber() {
int majorVersion;
int minorVersion;
int profile;
SDL_GL_GetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, &majorVersion);
SDL_GL_GetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, &minorVersion);
SDL_GL_GetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, &profile);
fprintf(stderr,"GL version %d.%d ",majorVersion,minorVersion);
switch (profile) {
case SDL_GL_CONTEXT_PROFILE_CORE: fprintf(stderr,"core (%d)\n",profile);break;
case SDL_GL_CONTEXT_PROFILE_COMPATIBILITY: fprintf(stderr,"compatibility (%d)\n",profile);break;
case SDL_GL_CONTEXT_PROFILE_ES: fprintf(stderr,"E.S. (%d)\n",profile);break;
default: fprintf(stderr, "unknown profile: %d\n",profile);break;
}
return;
}
#define checkGlError(label) {int error = glGetError();if (error != GL_NO_ERROR) {error_at_line(0,0,__FILE__,__LINE__,"error=%d", error);goto label;}}
int main(int argc, char **argv) {
SDL_Window *window = NULL;
SDL_GLContext context = NULL;
GLuint verticesGlIds[] = {0,0};
GLuint vertexShaderGlId = 0;
GLuint shaderProgramGlId = 0;
if (fixSDLconsole()) {
return errno;
}
if (SDL_Init(SDL_INIT_VIDEO) != 0) {
error_at_line(1,0,__FILE__,__LINE__,"Unable to initialize SDL: %s",SDL_GetError());
goto error;
}
printGLVersionNumber();
window = SDL_CreateWindow("Window Title",SDL_WINDOWPOS_UNDEFINED,SDL_WINDOWPOS_UNDEFINED,640,640,SDL_WINDOW_OPENGL);
if (window == NULL) {
error_at_line(0,0,__FILE__,__LINE__,"Could not create window: %s", SDL_GetError());
goto error;
}
context = SDL_GL_CreateContext(window);
if (context == NULL) {
error_at_line(0,0,__FILE__,__LINE__,"Could not create OpenGL context: %s", SDL_GetError());
goto error;
}
glViewport(0,0,640,640);checkGlError(error);
glClearColor(.9f,.9f,.9f,1.f);checkGlError(error);
glEnableClientState(GL_VERTEX_ARRAY);checkGlError(error);
glEnableClientState(GL_COLOR_ARRAY);checkGlError(error);
float vertices[] = {
-.5f,0.f,0.f,
0.f,.5f,0.f,
0.f,-.5f,0.f,
0.f,.5f,0.f,
.5f,.5f,0.f,
0.f,0.f,0.f
};
float colors[] = {
1.f,0.f,0.f,//red
.5f,0.f,0.f,//red
0.f,1.f,0.f,//green
0.f,.5f,0.f,//green
0.f,0.f,1.f,//blue
0.f,0.f,.5f//blue
};
glGenBuffers(2, &verticesGlIds);checkGlError(error);
glBindBuffer(GL_ARRAY_BUFFER, verticesGlIds[0]);checkGlError(error);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);checkGlError(error);
glBindBuffer(GL_ARRAY_BUFFER, verticesGlIds[1]);checkGlError(error);
glBufferData(GL_ARRAY_BUFFER,sizeof(colors),colors, GL_STATIC_DRAW);checkGlError(error);
char *vertexShader =
"#version 120\n"\
"attribute vec3 aPos;\n"\
"uniform float i;\n"\
"void main() {\n"\
"gl_FrontColor=gl_Color;\n"\
"gl_Position = vec4(aPos.x+i/2,aPos.y,aPos.z,1.0);\n"\
"}\n";
vertexShaderGlId = glCreateShader(GL_VERTEX_SHADER);checkGlError(error);
if (vertexShaderGlId == 0) {error_at_line(0,0,__FILE__,__LINE__,"vertex shader could not be created");goto error;}
glShaderSource(vertexShaderGlId, 1, &vertexShader, NULL);checkGlError(error);
glCompileShader(vertexShaderGlId);checkGlError(error);
{
GLint success;
glGetShaderiv(vertexShaderGlId, GL_COMPILE_STATUS, &success);checkGlError(error);
if (success == GL_FALSE) {
char infoLog[512];
glGetShaderInfoLog(vertexShaderGlId, 512, NULL, infoLog);checkGlError(error);
error_at_line(0,0,__FILE__,__LINE__,"Vertex Shader problem: %s", infoLog);
goto error;
}
}
shaderProgramGlId = glCreateProgram();checkGlError(error);
if (shaderProgramGlId == 0) {error_at_line(0,0,__FILE__,__LINE__,"shader program could not be created");goto error;}
glAttachShader(shaderProgramGlId, vertexShaderGlId);checkGlError(error);
glLinkProgram(shaderProgramGlId);checkGlError(error);
{
int success;
glGetProgramiv(shaderProgramGlId, GL_LINK_STATUS, &success);checkGlError(error);
if (!success) {
char infoLog[512];
glGetProgramInfoLog(shaderProgramGlId, 512, NULL, infoLog);checkGlError(error);
error_at_line(0,0,__FILE__,__LINE__,"Shader program problem: %s", infoLog);
}
}
glDeleteShader(vertexShaderGlId);checkGlError(error);
GLint iLocation = glGetUniformLocation(shaderProgramGlId, "i");checkGlError(error);
if (iLocation == -1) {error_at_line(0,0,__FILE__,__LINE__,"uniform i not found in shader");goto error;}
error_at_line(0,0,__FILE__,__LINE__,"iLocation: %d", iLocation);
for (int frame = 0; frame < 100; ++frame) {
glClear(GL_COLOR_BUFFER_BIT);checkGlError(error);
glUseProgram(shaderProgramGlId);checkGlError(error);
glBindBuffer(GL_ARRAY_BUFFER, verticesGlIds[0]); checkGlError(error);
glVertexPointer(3,GL_FLOAT,0,0); checkGlError(error);
glBindBuffer(GL_ARRAY_BUFFER, verticesGlIds[1]); checkGlError(error);
glColorPointer(3,GL_FLOAT,0,0); checkGlError(error);
glUniform1f(iLocation, (float) (frame%2)); checkGlError(error);
glDrawArrays(GL_TRIANGLES, 0,sizeof(vertices)/sizeof(float)/3); checkGlError(error);
glBindBuffer(GL_ARRAY_BUFFER, 0); checkGlError(error);
SDL_GL_SwapWindow(window);
SDL_Delay(100);
}
glDeleteProgram(shaderProgramGlId);
glDeleteShader(vertexShaderGlId);
glDeleteBuffers(sizeof(verticesGlIds)/sizeof(GLuint), verticesGlIds);
SDL_GL_DeleteContext(context);
SDL_Delay(3000);
SDL_DestroyWindow(window);
SDL_Quit();
return EXIT_SUCCESS;
error:
glDeleteProgram(shaderProgramGlId);
glDeleteShader(vertexShaderGlId);
glDeleteBuffers(sizeof(verticesGlIds)/sizeof(GLuint), verticesGlIds);
if (context != NULL) SDL_GL_DeleteContext(context);
if (window != NULL) SDL_DestroyWindow(window);
SDL_Quit();
return EXIT_FAILURE;
}
#if defined(__WINDOWS__)
int WINAPI WinMain(HINSTANCE hInstance, HINSTANCE hPrevInstance, LPSTR lpCmdLine, int nShowCmd) {
char *argv[1] = {(char *) 0};
return main(0, argv);
}
#endif
Note that I am not familiar with OpenGL's extension-loading machinery, such as SDL's SDL_GL_LoadLibrary and SDL_GL_GetProcAddress; I just declare the function signatures manually at the top of the file and pull in GL through the linker. I don't expect this to be the problem, but it is the only thing I am aware of that I haven't looked into, and it may be causing my problems.
So you declare the function like this:
void glUniform1f();
By omitting the parameters you declare the function without a prototype, so the compiler applies only the default argument promotions to each argument. For most other GL functions this happens to work, because their arguments are integer types anyway; but for glUniform1f the float you pass is promoted to double, and the function then reinterprets part of that bit pattern as a GLfloat. The actual prototype for glUniform1f is something like this:
void glUniform1f(GLint location, GLfloat value);
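The same mismatch can be reproduced without GL at all; a hypothetical two-file sketch (all names are mine):

/* caller.c -- deliberately mirrors the question's declarations */
void takes_float();                /* no prototype: no argument conversion */
int main(void)
{
    takes_float(1.0f);             /* 1.0f is promoted to double (8 bytes) */
    return 0;
}

/* callee.c */
void takes_float(float f)          /* expects a 4-byte float: undefined behavior */
{
    (void) f;                      /* typically ends up a garbage bit pattern */
}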
note that I am not familiar with OpenGL's extension-loading machinery, such as SDL's SDL_GL_LoadLibrary and SDL_GL_GetProcAddress; I just declare the function signatures manually at the top of the file and pull in GL through the linker.
You shouldn't do this. The GL functions you try to access might not even be exported by the library at all. If you do not want to manually deal with loading every function pointer, you can use one of the existing OpenGL loaders.
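If you want to stay close to your current approach, SDL can hand you the pointer with a correct type attached. A minimal sketch, assuming SDL_GL_CreateContext() has already succeeded (the typedef is written out by hand here; in a real project it comes from <GL/glext.h>, and APIENTRY expands to nothing outside Windows):

typedef void (APIENTRY *PFNGLUNIFORM1FPROC)(GLint location, GLfloat v0);

PFNGLUNIFORM1FPROC pglUniform1f =
    (PFNGLUNIFORM1FPROC) SDL_GL_GetProcAddress("glUniform1f");
if (pglUniform1f == NULL) {
    /* entry point not exported by this context: handle the error */
}
pglUniform1f(iLocation, (float)(frame % 2));  /* called through a real prototype */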
gl_Position expects clip-space coordinates, which lie inside a hypercube whose sides span [-w, w] along each axis.
For vec4(x, y, z, w), if any of x, y, z is outside the [-w, w] range, the vertex is clipped.
The coordinates are automatically converted by the GPU to NDC space (x/w, y/w, z/w, 1), the so-called "perspective division", before the fragment shader runs.
Your GLSL code gl_Position = vec4(aPos.x+i/2,aPos.y,aPos.z,1.0); uses the uniform i.
You update it by glUniform1f(iLocation, (float) (frame%2));
The first issue is frame%2: only 0 or 1 is ever passed to the GPU, so with your current vertex data the two triangles only ever appear at two positions.
The second issue is that frame ranges over 0 <= frame < 100. If you passed frame instead of frame%2, then for most values aPos.x + i/2 would fall outside the clip volume, and you would see only the first couple of positions, or parts of them.
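As a worked example (the numbers are mine, taken from the vertex data above): with w = 1.0 and the vertex at aPos.x = 0.5, i = 0 gives x = 0.5 and i = 1 gives x = 1.0, both still inside [-1, 1]; i = 4 would give x = 2.5, and the vertex is clipped away.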

C, SDL: error: cannot convert 'SDL_Surface*' to 'SDL_Surface* (*)[15]'

So I'm working on code that fills a screen with a table of surfaces; here's the code:
main.c
#ifdef __cplusplus
#include <cstdlib>
#else
#include <stdlib.h>
#endif
#include <SDL/SDL.h>
#include <SDL_image.h>
#include "maploader.h"
#define W 510
#define H 510
#define SIZE 34
void pause();
int main ( int argc, char** argv )
{
SDL_Surface *screen = NULL;
SDL_Surface *surfaces[15][15];
SDL_Init(SDL_INIT_VIDEO);
screen = SDL_SetVideoMode(W, H, 32, SDL_HWSURFACE | SDL_DOUBLEBUF);
SDL_WM_SetCaption("demon game", NULL);
SDL_FillRect(screen, NULL, SDL_MapRGB(screen->format, 255, 255, 255));
mapload(screen, surfaces[15][15], NULL);
SDL_Flip(screen);
pause();
SDL_QUIT;
return EXIT_SUCCESS;
}
void pause()
{
int continuer = 1;
SDL_Event event;
while (continuer)
{
SDL_WaitEvent(&event);
switch(event.type)
{
case SDL_QUIT:
continuer = 0;
}
}
}
maploader.c
#ifdef __cplusplus
#include <cstdlib>
#else
#include <stdlib.h>
#endif
#include <SDL/SDL.h>
#include <SDL_image.h>
#define W 510
#define H 510
#define SIZE 34
SDL_Surface *surfaces[15][15];
void mapload(SDL_Surface *screen, SDL_Surface *surfaces[][15], int lvl)
{
FILE *level = NULL;
char elements[125];
int i, j, k = 0;
SDL_Rect elementposition = {0,0};
level = fopen("level.txt", "r");
if (level == NULL)
{
exit(0);
}
fgets(elements, 125, level);
SDL_FillRect(screen, NULL, SDL_MapRGB(screen->format, 255, 255, 255));
for (i=0; i<15; i++)
{
for (j=0; j<15; j++)
{
if (elements[k] == "0")
{
surfaces[i][j] = IMG_Load("mur.jpg");
}
else if (elements[k] == "1")
{
surfaces[i][j] = IMG_Load("caisse.jpg");
}
else if (elements[k] == "2")
{
surfaces[i][j] = IMG_Load("objectif.png");
}
else
{
surfaces[i][j] = NULL;
}
k++;
}
}
for (i=0; i<15; i++)
{
for (j=0; j<15; j++)
{
SDL_BlitSurface(surfaces[i][j], NULL, screen, &elementposition);
elementposition.x += SIZE;
}
elementposition.y += SIZE;
}
}
the only error I get from compiling is the following: "cannot convert 'SDL_Surface*' to 'SDL_Surface* (*)[15]' for argument '2' to 'void mapload(SDL_Surface*, SDL_Surface* (*)[15], int)'"
Apparently the error comes from the second argument of the mapload function, but I don't have a clear idea of what exactly is wrong. Any ideas, please?
This,
mapload(screen, surfaces[15][15], NULL);
should be
mapload(screen, surfaces, NULL);
But now you should ask yourself: if you didn't know that, then probably
the signature void mapload(SDL_Surface *screen, SDL_Surface *surfaces[][15], int lvl) is not doing what you think either.
You need to study what arrays are in C and how they relate to pointers.
Note that surfaces[15][15] means the 16th element of the 16th row of pointers, neither of which exists, because you only allocated 15 of each (valid indices run from 0 to 14). So you need to understand arrays in C, how they are allocated, and how you can have a dynamic array.
Also, telling a C compiler that a function parameter is an array means little inside that function, since the parameter is adjusted to a pointer, so the syntax SDL_Surface *surfaces[][15] looks strange to a C programmer.
And finally, since surfaces is a global variable you don't need to pass it as a parameter at all; but then you should ask yourself whether it should be a global variable.
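A compact, self-contained sketch of the decay rule being described (int stands in for SDL_Surface*; all names are mine):

#include <stdio.h>

void takes_rows(int rows[][15])    /* adjusted by the compiler to int (*rows)[15] */
{
    printf("%d\n", rows[0][0]);
}

int main(void)
{
    int grid[15][15] = {{42}};
    takes_rows(grid);              /* the array name decays to int (*)[15]: OK */
    /* takes_rows(grid[15][15]);      would pass a single (out-of-range) int
                                      and fail to compile, just like the question */
    return 0;
}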

What's wrong with my code that prints structure information?

#include <stdio.h>
#include <conio.h>
#include <windows.h>
#include <ctype.h>
struct ALUMNO{
int cod;
char nombre[20], grupo[3], app[20], apm[20];
float prom,cali[5];
} al[20]={'\0'};
void gotoxy(int x,int y){
HANDLE hcon;
hcon = GetStdHandle(STD_OUTPUT_HANDLE);
COORD dwPos;
dwPos.X = x;
dwPos.Y= y;
SetConsoleCursorPosition(hcon,dwPos);
}
int main()
{
char gru[3];
int x = 0, sw, ac;
al[0].cod=12345;
strcpy(al[0].grupo,"1A");
strcpy(al[0].nombre,"Erick");
strcpy(al[0].app,"Medina");
strcpy(al[0].apm,"Ramirez");
al[0].prom=0.0;
al[1].cod=12346;
strcpy(al[1].grupo,"1A");
strcpy(al[1].nombre,"Emmanuel");
strcpy(al[1].app,"Sauceda");
strcpy(al[1].apm,"Perez");
al[1].prom=0.0;
al[2].cod=12347;
strcpy(al[2].grupo,"1B");
strcpy(al[2].nombre,"Vincio");
strcpy(al[2].app,"Lopez");
strcpy(al[2].apm,"Martinez");
al[2].prom=0.0;
//salon B
al[3].cod=12348;
strcpy(al[3].grupo,"1B");
strcpy(al[3].nombre,"Bryan");
strcpy(al[3].app,"Osuna");
strcpy(al[3].apm,"Beltran");
al[3].prom=0.0;
al[4].cod=12349;
strcpy(al[4].grupo,"1C");
strcpy(al[4].nombre,"Fullano");
strcpy(al[4].app,"Mangano");
strcpy(al[4].apm,"Centenario");
al[4].prom=0.0;
al[5].cod=12350;
strcpy(al[5].grupo,"1C");
strcpy(al[5].nombre,"Chapo");
strcpy(al[5].app,"Guzman");
strcpy(al[5].apm,"Loera");
al[5].prom=0.0;
//done
printf("Grupo: ");
scanf("%s",&gru);
gru[1]=toupper(gru[1]);
system("cls");
printf("Codigo\tAp.paterno\tap.materno\tnombre\tpromedio");
for (x=0, sw=0; x<25 && al[x].cod!=0; x++){
if (strcmp(gru,al[x].grupo)==0){
sw=1;
ac++;
}
if (sw==1){
gotoxy(1,ac);
printf("%i",al[x].cod);
gotoxy(12,ac);
printf("%s",al[x].app);
gotoxy(30,ac);
printf("%s",al[x].apm);
gotoxy(50,ac);
printf("%s",al[x].nombre);
gotoxy(60,ac);
printf("%.2f",al[x].prom);
}
}
ac=0;
}
For some reason, when you type in the correct group and hit enter, it prints maternal last names on top of others, or some names go missing; or maybe it's just my compiler. It works fine when there is only one name per group.
Your gru variable is used wrongly in scanf: it should be gru, not &gru, because an array name already decays to a pointer to its first element. Note also that ac is never given an initial value before you do ac++, so gotoxy is called with a garbage row coordinate, which by itself can make names land on top of each other.
By the way, don't use scanf; use fgets.
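A minimal sketch of that suggestion (the buffer size and the strcspn trick are my own choices; strcspn needs <string.h>):

char gru[8];
if (fgets(gru, sizeof gru, stdin) != NULL) {
    gru[strcspn(gru, "\n")] = '\0';  /* drop the trailing newline fgets keeps */
}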

deformed object when importing obj file to openGL code

I am relatively new to OpenGL and 3D drawing.
I drew a little person in Blender and exported its vertices to a .obj file so I can render that person using my C code.
This is the person I drew:
The problem
When I import it into my code I don't see a person anymore, but a horrible ugly mess:
What I think
In the beginning, when I tested my software, I drew a cube and imported its vertices into my code. I saw that I only had 6 v-vertices; this gave me a partial cube (one side was not closed).
So maybe it has something to do with that.
What can I do to solve this issue? Where did I make a mistake? Does it have something to do with the projection, or with the export from Blender?
This is my C code
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <math.h>
#include <GL/glut.h>
#include "myVertices.h"
float xCamera = 3.0;
float yCamera = 3.0;
float zCamera = 0.0;
float barelRollCamera = 0.0;
void init(void)
{
glClearColor(1.0, 1.0, 1.0, 1.0);
glEnable(GL_DEPTH_TEST);
}
void drawVertex(GLfloat vertex[][3], int numberVertex, int shape)
{
int i;
glPushMatrix();
switch(shape)
{
case 0: glBegin(GL_LINES); break;
case 1: glBegin(GL_POLYGON); break;
default: break;
}
for(i=0;i<numberVertex;i++)
{
glVertex3fv(vertex[i]);
}
glEnd();
glPopMatrix();
}
void drawScene(void)
{
glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
gluLookAt(xCamera,yCamera,3.0, 0.0,0.0,0.0, 0.0,1.0,barelRollCamera);
glColor3f(0.0, 0.0, 0.0);
//draws axis
drawVertex(assen,6,0);
//draws person I drew in blender using the vertices in my header file
drawVertex(person,1038,1);
glFlush();
}
void herschaal(){
glViewport(0,0,500,500);
glLoadIdentity();
glMatrixMode(GL_PROJECTION);
glOrtho(-6.500, 6.500, -6.500, 6.500, -6.00, 12.00);
}
int main( int argc, char * argv[])
{
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB | GLUT_DEPTH );
glutInitWindowPosition(50, 100);
glutInitWindowSize(1000, 1000);
glutCreateWindow("test");
glutReshapeFunc(herschaal);
init();
glutDisplayFunc(drawScene);
glutMainLoop();
return 0;
}
The original export from Blender: http://hastebin.com/owubizotuv.hs
The file with the v-vertices I use, i.e. myVertices.h: http://hastebin.com/lirajuhiqe.avrasm

OpenGL program doesn't stop taking input and doesn't display Output

//Program to implement Basic Incremental Algorithm
//Working on ubuntu
#include <GL/glut.h>
#include<stdlib.h>
#include <stdio.h>
GLfloat x0,x1,y0,y1; //Input variables taken as global
int flag=1; //variable for display1()
void init(void)
{
glClearColor(0.0,0.0,0.0,0.0);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
gluOrtho2D(0.0,500.0,0.0,500.0);
}
void PutPixel(GLfloat x,GLfloat y)
{
glBegin(GL_POINTS);
glVertex2f(x,y); //To display pixels on-screen
glEnd();
glFlush();
}
void display1(void)
{
if (flag==1)
{
glClear(GL_COLOR_BUFFER_BIT);
glColor3f(0.7,1.0,1.0);
GLfloat m,c,y;
GLfloat i; //BIA algorithm
m=(y1-y0)/((float)(x1-x0));
c=y1-m*x1;
for (i=x0; i<=x1; i+=0.01)
{
y=c+m*i;
PutPixel(i,y);
}
flag++;
}
}
void Input()
{
printf("Enter the co-ods\n");
scanf("%f %f",&x0,&y0);
scanf("%f %f",&x1,&y1);
}
int main(int argc, char **argv)
{
Input();
glutInit(&argc,argv);
glutInitDisplayMode(GLUT_SINGLE|GLUT_RGBA);
glutInitWindowSize(500,500);
glutInitWindowPosition(100,100);
glutCreateWindow("BIA");
init();
glutDisplayFunc(display1);
glutMainLoop();
return 0;
}
I have initialized flag as a global variable at the beginning and set it to 1. flag is used in display1() to ensure that it executes only once. This is just one way I was trying to ensure that the output gets displayed.
Can anyone please help?
Why doesn't the program stop taking input?
It's working. I'm still unsure about which edit or change brought it about, but it's working!! It's displaying some output.
